Sep 29 18:56:26 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 29 18:56:26 crc restorecon[4557]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:26 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 18:56:27 crc restorecon[4557]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc 
restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 18:56:27 crc 
restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc 
restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc 
restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 
crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 
18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 18:56:27 crc restorecon[4557]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 
18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 18:56:27 crc 
restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 
18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 18:56:27 crc restorecon[4557]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 18:56:27 crc restorecon[4557]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 29 18:56:28 crc kubenswrapper[4792]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 18:56:28 crc kubenswrapper[4792]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 29 18:56:28 crc kubenswrapper[4792]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 18:56:28 crc kubenswrapper[4792]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 29 18:56:28 crc kubenswrapper[4792]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Sep 29 18:56:28 crc kubenswrapper[4792]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.607843 4792 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613344 4792 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613366 4792 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613372 4792 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613378 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613384 4792 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613389 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613395 4792 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613400 4792 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613405 4792 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613410 4792 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613415 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613420 4792 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613427 4792 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613433 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613459 4792 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613464 4792 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613469 4792 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613474 4792 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613487 4792 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613492 4792 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613497 4792 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613502 4792 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613507 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613512 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613516 4792 feature_gate.go:330] unrecognized feature gate: Example Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613521 4792 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613535 4792 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613541 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613546 4792 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613550 4792 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613557 4792 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613563 4792 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613570 4792 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613577 4792 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613582 4792 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613586 4792 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613591 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613596 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613601 4792 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613605 4792 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613610 4792 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613615 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613620 4792 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613625 4792 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613630 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613635 4792 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613640 4792 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613644 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613649 4792 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613654 4792 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613658 4792 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613663 4792 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613668 4792 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613673 4792 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613678 4792 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613682 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613687 4792 feature_gate.go:330] unrecognized feature gate: 
DNSNameResolver Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613692 4792 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613696 4792 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613703 4792 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613709 4792 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613715 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613720 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613725 4792 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613730 4792 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613735 4792 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613740 4792 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613745 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613750 4792 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613755 4792 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.613762 4792 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
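
The long runs of "unrecognized feature gate: X" warnings record the cluster-supplied gate set being checked against the gates this kubelet binary actually knows: unknown names are warned about and skipped rather than failing startup, and force-set deprecated or GA gates get the "It will be removed in a future release" notice. A minimal stdlib sketch of that filtering (the gate lists are small excerpts from this log; the real logic lives in feature_gate.go, not here):

package main

import "fmt"

func main() {
	known := map[string]bool{ // gates compiled into this binary (excerpt)
		"CloudDualStackNodeIPs":                  true,
		"DisableKubeletCloudCredentialProviders": true,
		"KMSv1":                                  true,
		"ValidatingAdmissionPolicy":              true,
	}
	deprecated := map[string]bool{"KMSv1": true}
	ga := map[string]bool{
		"CloudDualStackNodeIPs":                  true,
		"DisableKubeletCloudCredentialProviders": true,
		"ValidatingAdmissionPolicy":              true,
	}

	requested := map[string]bool{ // excerpt of the cluster-supplied set
		"ConsolePluginContentSecurityPolicy": false,
		"KMSv1":                              true,
		"ValidatingAdmissionPolicy":          true,
	}

	effective := map[string]bool{}
	for name, enabled := range requested {
		if !known[name] {
			// Unknown to this binary: warn and continue, as in the W lines above.
			fmt.Printf("W unrecognized feature gate: %s\n", name)
			continue
		}
		switch {
		case deprecated[name] && enabled:
			fmt.Printf("W Setting deprecated feature gate %s=true. It will be removed in a future release.\n", name)
		case ga[name] && enabled:
			fmt.Printf("W Setting GA feature gate %s=true. It will be removed in a future release.\n", name)
		}
		effective[name] = enabled
	}
	fmt.Printf("I feature gates: %v\n", effective)
}

Judging from the timestamps, the same gate set is evidently applied more than once during startup, which is why the identical warning run repeats several times further down, each pass ending in the same resolved-map summary.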
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613880 4792 flags.go:64] FLAG: --address="0.0.0.0" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613892 4792 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613902 4792 flags.go:64] FLAG: --anonymous-auth="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613910 4792 flags.go:64] FLAG: --application-metrics-count-limit="100" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613917 4792 flags.go:64] FLAG: --authentication-token-webhook="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613923 4792 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613931 4792 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613939 4792 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613945 4792 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613951 4792 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613957 4792 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613963 4792 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613968 4792 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613975 4792 flags.go:64] FLAG: --cgroup-root="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613980 4792 flags.go:64] FLAG: --cgroups-per-qos="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613986 4792 flags.go:64] FLAG: --client-ca-file="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613992 4792 flags.go:64] FLAG: --cloud-config="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.613997 4792 flags.go:64] FLAG: --cloud-provider="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614003 4792 flags.go:64] FLAG: --cluster-dns="[]" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614010 4792 flags.go:64] FLAG: --cluster-domain="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614015 4792 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614021 4792 flags.go:64] FLAG: --config-dir="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614026 4792 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614032 4792 flags.go:64] FLAG: --container-log-max-files="5" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614040 4792 flags.go:64] FLAG: --container-log-max-size="10Mi" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614046 4792 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614053 4792 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614061 4792 flags.go:64] FLAG: --containerd-namespace="k8s.io" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614068 4792 flags.go:64] FLAG: --contention-profiling="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 
18:56:28.614077 4792 flags.go:64] FLAG: --cpu-cfs-quota="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614084 4792 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614090 4792 flags.go:64] FLAG: --cpu-manager-policy="none" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614097 4792 flags.go:64] FLAG: --cpu-manager-policy-options="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614106 4792 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614113 4792 flags.go:64] FLAG: --enable-controller-attach-detach="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614120 4792 flags.go:64] FLAG: --enable-debugging-handlers="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614126 4792 flags.go:64] FLAG: --enable-load-reader="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614133 4792 flags.go:64] FLAG: --enable-server="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614140 4792 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614148 4792 flags.go:64] FLAG: --event-burst="100" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614156 4792 flags.go:64] FLAG: --event-qps="50" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614163 4792 flags.go:64] FLAG: --event-storage-age-limit="default=0" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614171 4792 flags.go:64] FLAG: --event-storage-event-limit="default=0" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614178 4792 flags.go:64] FLAG: --eviction-hard="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614187 4792 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614193 4792 flags.go:64] FLAG: --eviction-minimum-reclaim="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614200 4792 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614206 4792 flags.go:64] FLAG: --eviction-soft="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614213 4792 flags.go:64] FLAG: --eviction-soft-grace-period="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614220 4792 flags.go:64] FLAG: --exit-on-lock-contention="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614227 4792 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614234 4792 flags.go:64] FLAG: --experimental-mounter-path="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614240 4792 flags.go:64] FLAG: --fail-cgroupv1="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614246 4792 flags.go:64] FLAG: --fail-swap-on="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614251 4792 flags.go:64] FLAG: --feature-gates="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614259 4792 flags.go:64] FLAG: --file-check-frequency="20s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614267 4792 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614274 4792 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614282 4792 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 
18:56:28.614289 4792 flags.go:64] FLAG: --healthz-port="10248" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614296 4792 flags.go:64] FLAG: --help="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614303 4792 flags.go:64] FLAG: --hostname-override="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614309 4792 flags.go:64] FLAG: --housekeeping-interval="10s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614332 4792 flags.go:64] FLAG: --http-check-frequency="20s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614338 4792 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614355 4792 flags.go:64] FLAG: --image-credential-provider-config="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614362 4792 flags.go:64] FLAG: --image-gc-high-threshold="85" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614369 4792 flags.go:64] FLAG: --image-gc-low-threshold="80" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614376 4792 flags.go:64] FLAG: --image-service-endpoint="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614394 4792 flags.go:64] FLAG: --kernel-memcg-notification="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614400 4792 flags.go:64] FLAG: --kube-api-burst="100" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614408 4792 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614415 4792 flags.go:64] FLAG: --kube-api-qps="50" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614422 4792 flags.go:64] FLAG: --kube-reserved="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614429 4792 flags.go:64] FLAG: --kube-reserved-cgroup="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614435 4792 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614444 4792 flags.go:64] FLAG: --kubelet-cgroups="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614451 4792 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614460 4792 flags.go:64] FLAG: --lock-file="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614474 4792 flags.go:64] FLAG: --log-cadvisor-usage="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614480 4792 flags.go:64] FLAG: --log-flush-frequency="5s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614486 4792 flags.go:64] FLAG: --log-json-info-buffer-size="0" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614495 4792 flags.go:64] FLAG: --log-json-split-stream="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614501 4792 flags.go:64] FLAG: --log-text-info-buffer-size="0" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614507 4792 flags.go:64] FLAG: --log-text-split-stream="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614512 4792 flags.go:64] FLAG: --logging-format="text" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614518 4792 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614524 4792 flags.go:64] FLAG: --make-iptables-util-chains="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614539 4792 flags.go:64] FLAG: --manifest-url="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614546 4792 
flags.go:64] FLAG: --manifest-url-header="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614555 4792 flags.go:64] FLAG: --max-housekeeping-interval="15s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614561 4792 flags.go:64] FLAG: --max-open-files="1000000" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614568 4792 flags.go:64] FLAG: --max-pods="110" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614573 4792 flags.go:64] FLAG: --maximum-dead-containers="-1" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614579 4792 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614585 4792 flags.go:64] FLAG: --memory-manager-policy="None" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614590 4792 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614596 4792 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614602 4792 flags.go:64] FLAG: --node-ip="192.168.126.11" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614608 4792 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614620 4792 flags.go:64] FLAG: --node-status-max-images="50" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614626 4792 flags.go:64] FLAG: --node-status-update-frequency="10s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614632 4792 flags.go:64] FLAG: --oom-score-adj="-999" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614638 4792 flags.go:64] FLAG: --pod-cidr="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614643 4792 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614653 4792 flags.go:64] FLAG: --pod-manifest-path="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614658 4792 flags.go:64] FLAG: --pod-max-pids="-1" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614664 4792 flags.go:64] FLAG: --pods-per-core="0" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614670 4792 flags.go:64] FLAG: --port="10250" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614676 4792 flags.go:64] FLAG: --protect-kernel-defaults="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614681 4792 flags.go:64] FLAG: --provider-id="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614688 4792 flags.go:64] FLAG: --qos-reserved="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614694 4792 flags.go:64] FLAG: --read-only-port="10255" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614699 4792 flags.go:64] FLAG: --register-node="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614706 4792 flags.go:64] FLAG: --register-schedulable="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614711 4792 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614721 4792 flags.go:64] FLAG: --registry-burst="10" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614727 4792 flags.go:64] FLAG: --registry-qps="5" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614732 4792 flags.go:64] 
FLAG: --reserved-cpus="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614738 4792 flags.go:64] FLAG: --reserved-memory="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614745 4792 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614755 4792 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614761 4792 flags.go:64] FLAG: --rotate-certificates="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614766 4792 flags.go:64] FLAG: --rotate-server-certificates="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614772 4792 flags.go:64] FLAG: --runonce="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614777 4792 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614783 4792 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614790 4792 flags.go:64] FLAG: --seccomp-default="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614795 4792 flags.go:64] FLAG: --serialize-image-pulls="true" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614801 4792 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614806 4792 flags.go:64] FLAG: --storage-driver-db="cadvisor" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614812 4792 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614818 4792 flags.go:64] FLAG: --storage-driver-password="root" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614823 4792 flags.go:64] FLAG: --storage-driver-secure="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614829 4792 flags.go:64] FLAG: --storage-driver-table="stats" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614834 4792 flags.go:64] FLAG: --storage-driver-user="root" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614840 4792 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614869 4792 flags.go:64] FLAG: --sync-frequency="1m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614879 4792 flags.go:64] FLAG: --system-cgroups="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614886 4792 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614897 4792 flags.go:64] FLAG: --system-reserved-cgroup="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614904 4792 flags.go:64] FLAG: --tls-cert-file="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614912 4792 flags.go:64] FLAG: --tls-cipher-suites="[]" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614922 4792 flags.go:64] FLAG: --tls-min-version="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614929 4792 flags.go:64] FLAG: --tls-private-key-file="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614935 4792 flags.go:64] FLAG: --topology-manager-policy="none" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614942 4792 flags.go:64] FLAG: --topology-manager-policy-options="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614949 4792 flags.go:64] FLAG: --topology-manager-scope="container" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614956 4792 flags.go:64] 
FLAG: --v="2" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614965 4792 flags.go:64] FLAG: --version="false" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614981 4792 flags.go:64] FLAG: --vmodule="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614988 4792 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.614995 4792 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615168 4792 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615178 4792 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615184 4792 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615192 4792 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615203 4792 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615210 4792 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615218 4792 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615224 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615230 4792 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615235 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615240 4792 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615246 4792 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615251 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615256 4792 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615261 4792 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615267 4792 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615273 4792 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615278 4792 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615283 4792 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615291 4792 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
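
The flags.go:64 "FLAG: --name=value" block above is the kubelet dumping its complete effective flag set after parsing, one entry per flag. The pattern is a walk over every registered flag; a sketch under the same pflag assumption as before, with two toy flags (values copied from this log) standing in for the kubelet's full set:

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("kubelet-sketch", pflag.ContinueOnError)
	fs.String("node-ip", "192.168.126.11", "node IP")
	fs.Int32("max-pods", 110, "maximum pods per node")
	_ = fs.Parse(nil)

	// Visit every registered flag and print its effective value, the same
	// pattern behind the flags.go:64 "FLAG: --name=value" lines above.
	fs.VisitAll(func(f *pflag.Flag) {
		fmt.Printf("FLAG: --%s=%q\n", f.Name, f.Value.String())
	})
}

Because the walk covers defaults as well as overrides, the dump doubles as a record of the node's full effective configuration at startup, which is useful when diffing behavior across restarts.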
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615297 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615304 4792 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615311 4792 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615317 4792 feature_gate.go:330] unrecognized feature gate: Example Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615323 4792 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615329 4792 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615334 4792 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615340 4792 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615345 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615350 4792 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615356 4792 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615361 4792 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615367 4792 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615376 4792 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615383 4792 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615389 4792 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615398 4792 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615404 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615409 4792 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615414 4792 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615420 4792 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615427 4792 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615434 4792 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615440 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615446 4792 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615452 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615457 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615462 4792 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615468 4792 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615473 4792 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615479 4792 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615484 4792 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615490 4792 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615495 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615500 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615506 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615511 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615516 4792 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615522 4792 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615528 4792 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615533 4792 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615538 4792 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615543 4792 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615548 4792 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615554 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615560 4792 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615566 4792 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615571 4792 feature_gate.go:330] 
unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615580 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615586 4792 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.615591 4792 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.615608 4792 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.626510 4792 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.626540 4792 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626620 4792 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626628 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626634 4792 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626639 4792 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626644 4792 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626649 4792 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626653 4792 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626661 4792 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
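
The feature_gate.go:386 summary above ("feature gates: {map[CloudDualStackNodeIPs:true ...]}") is the resolved set the rest of the process consults. Downstream code reads it through a feature-gate object rather than the raw map; a small sketch using the upstream k8s.io/component-base/featuregate package (gate names and values are taken from the summary, the PreRelease stages are illustrative, and whether this kubenswrapper build uses exactly this type is an assumption):

package main

import (
	"fmt"

	"k8s.io/component-base/featuregate"
)

func main() {
	gate := featuregate.NewFeatureGate()

	// Register two gates with defaults matching the summary map in the log.
	_ = gate.Add(map[featuregate.Feature]featuregate.FeatureSpec{
		"NodeSwap":    {Default: false, PreRelease: featuregate.Beta},
		"EventedPLEG": {Default: false, PreRelease: featuregate.Alpha},
	})

	// Apply an override string. Note that upstream Set fails hard on an
	// unknown name (its error text is the source of the phrase
	// "unrecognized feature gate: ..."), whereas the wrapper in this log
	// only warns and continues.
	if err := gate.Set("NodeSwap=false,EventedPLEG=false"); err != nil {
		fmt.Println("set:", err)
	}

	fmt.Println("NodeSwap enabled:", gate.Enabled("NodeSwap"))
}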
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626667 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626672 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626677 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626682 4792 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626687 4792 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626692 4792 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626697 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626701 4792 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626708 4792 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626713 4792 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626719 4792 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626724 4792 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626729 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626734 4792 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626739 4792 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626744 4792 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626758 4792 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626763 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626768 4792 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626773 4792 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626778 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626783 4792 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626789 4792 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626794 4792 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626799 4792 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626804 4792 feature_gate.go:330] 
unrecognized feature gate: CSIDriverSharedResource Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626808 4792 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626813 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626818 4792 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626823 4792 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626828 4792 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626833 4792 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626838 4792 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626842 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626863 4792 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626869 4792 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626874 4792 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626879 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626884 4792 feature_gate.go:330] unrecognized feature gate: Example Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626888 4792 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626893 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626898 4792 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626903 4792 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626908 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626913 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626917 4792 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626922 4792 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626927 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626932 4792 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626937 4792 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626944 4792 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626951 4792 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626964 4792 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626970 4792 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626976 4792 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626981 4792 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626986 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626990 4792 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.626995 4792 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627001 4792 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627007 4792 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627013 4792 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627018 4792 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.627026 4792 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627220 4792 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627229 4792 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627236 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627242 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627247 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627253 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627258 4792 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627263 4792 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627269 4792 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627274 4792 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627279 4792 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627284 4792 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627289 4792 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627294 4792 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627299 4792 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627303 4792 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627308 4792 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627313 4792 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627318 4792 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627323 4792 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627328 4792 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627334 4792 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627340 4792 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627347 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627367 4792 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627372 4792 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627378 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627383 4792 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627389 4792 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627394 4792 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627399 4792 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627403 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627408 4792 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627413 4792 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627418 4792 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627423 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627428 4792 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627432 4792 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627437 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627442 4792 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627447 4792 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627452 4792 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627457 4792 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627462 4792 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627468 4792 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627474 4792 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627480 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627485 4792 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627490 4792 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627496 4792 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627501 4792 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627506 4792 feature_gate.go:330] unrecognized feature gate: Example Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627511 4792 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627516 4792 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627522 4792 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627526 4792 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627532 4792 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627537 4792 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627543 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627549 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627566 4792 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627572 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627578 4792 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627583 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627587 4792 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627592 4792 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627599 4792 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627605 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627610 4792 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627615 4792 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.627620 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.627628 4792 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.628323 4792 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.633126 4792 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.633204 4792 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.634986 4792 server.go:997] "Starting client certificate rotation"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.635015 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.636928 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-17 23:50:19.828860674 +0000 UTC
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.637047 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1180h53m51.191820191s for next certificate rotation
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.662249 4792 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.666625 4792 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.749907 4792 log.go:25] "Validated CRI v1 runtime API"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.858234 4792 log.go:25] "Validated CRI v1 image API"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.860514 4792 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.867835 4792 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-29-18-00-57-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.867915 4792 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.893097 4792 manager.go:217] Machine: {Timestamp:2025-09-29 18:56:28.888262085 +0000 UTC m=+0.881569561 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2800000 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:798197c6-3029-4938-8b57-256852c71a3e BootID:2b56982a-4dd9-4681-8997-0d414fe55985 Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:05:9b:65 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:05:9b:65 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:4b:06:44 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:18:32:06 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:53:10:9c Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:8b:c2:e7 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:ca:37:d4:35:67:c4 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:2a:31:f0:21:10:c8 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.893460 4792 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.893653 4792 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.896622 4792 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.897117 4792 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.897174 4792 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.897502 4792 topology_manager.go:138] "Creating topology manager with none policy"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.897522 4792 container_manager_linux.go:303] "Creating device plugin manager"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.898094 4792 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.898154 4792 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.898395 4792 state_mem.go:36] "Initialized new in-memory state store"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.898537 4792 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.903665 4792 kubelet.go:418] "Attempting to sync node with API server"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.903704 4792 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.903778 4792 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.903803 4792 kubelet.go:324] "Adding apiserver pod source"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.903822 4792 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.909281 4792 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.910624 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.910834 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused
Sep 29 18:56:28 crc kubenswrapper[4792]: E0929 18:56:28.910988 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.115:6443: connect: connection refused" logger="UnhandledError"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.911320 4792 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Sep 29 18:56:28 crc kubenswrapper[4792]: E0929 18:56:28.910779 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.115:6443: connect: connection refused" logger="UnhandledError"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.914740 4792 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.916530 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.916709 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.916812 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.916958 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.917070 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.917148 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.917218 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.917306 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.917433 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.917511 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.917585 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.917663 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.921908 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.922500 4792 server.go:1280] "Started kubelet"
Sep 29 18:56:28 crc systemd[1]: Started Kubernetes Kubelet.
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.925141 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.925550 4792 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.932281 4792 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.931766 4792 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.933561 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.933606 4792 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.936019 4792 volume_manager.go:287] "The desired_state_of_world populator starts"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.936055 4792 volume_manager.go:289] "Starting Kubelet Volume Manager"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.936245 4792 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.935273 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 12:02:52.064682319 +0000 UTC
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.937496 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2609h6m23.127197098s for next certificate rotation
Sep 29 18:56:28 crc kubenswrapper[4792]: E0929 18:56:28.940173 4792 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 29 18:56:28 crc kubenswrapper[4792]: E0929 18:56:28.940783 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.115:6443: connect: connection refused" interval="200ms"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.942590 4792 server.go:460] "Adding debug handlers to kubelet server"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.943094 4792 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.943118 4792 factory.go:55] Registering systemd factory
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.943130 4792 factory.go:221] Registration of the systemd container factory successfully
Sep 29 18:56:28 crc kubenswrapper[4792]: E0929 18:56:28.942389 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.115:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869d5c90b7854f5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-29 18:56:28.922467573 +0000 UTC m=+0.915774989,LastTimestamp:2025-09-29 18:56:28.922467573 +0000 UTC m=+0.915774989,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 29 18:56:28 crc kubenswrapper[4792]: W0929 18:56:28.943998 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused
Sep 29 18:56:28 crc kubenswrapper[4792]: E0929 18:56:28.944243 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.115:6443: connect: connection refused" logger="UnhandledError"
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.945972 4792 factory.go:153] Registering CRI-O factory
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.945992 4792 factory.go:221] Registration of the crio container factory successfully
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.946014 4792 factory.go:103] Registering Raw factory
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.946030 4792 manager.go:1196] Started watching for new ooms in manager
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.946685 4792 manager.go:319] Starting recovery of all containers
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949574 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949615 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949629 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949642 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949654 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949664 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949675 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949687 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949702 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949715 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949728 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949743 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949754 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949767 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949778 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949790 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949801 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949815 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949829 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949840 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949869 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949882 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949893 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949927 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949939 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949951 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949966 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949979 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.949992 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950004 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950016 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950027 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950039 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950051 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950063 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950075 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950087 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950100 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950113 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950126 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950137 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950149 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950161 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950172 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950183 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950198 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950209 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950222 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950233 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950245 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950255 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950267 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950283 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950297 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950309 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950320 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950332 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950345 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950356 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950367 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950379 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950391 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950403 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950415 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950427 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.950440 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952612 4792 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952646 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952662 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952675 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952687 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952700 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952711 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952724 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952736 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952748 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952759 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952771 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952783 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952795 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952807 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952818 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952830 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952841 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952868 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952882 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952892 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952902 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952912 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952923 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952934 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952945 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952956 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952967 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952978 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.952989 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953000 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953011 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953022 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953034 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953046 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953058 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953069 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953080 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953093 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953112 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953124 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953148 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953160 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953174 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953189 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953202 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953215 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953227 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953238 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953249 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953260 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953272 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953282 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953293 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953303 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953313 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953323 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953334 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953352 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953364 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953377 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953389 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953400 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953411 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953422 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953433 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953445 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953459 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953470 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953482 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953495 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953506 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953517 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953526 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953537 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953549 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953562 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953576 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953587 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953599 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953611 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953622 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953634 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953647 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953659 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953670 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953680 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953692 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953703 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953715 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953727 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953739 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953750 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953763 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953777 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953878 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953891 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953903 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953913 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953927 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953939 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953952 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953964 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953975 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953986 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.953999 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954010 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954021 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954033 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954044 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954057 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954069 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954081 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954091 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954102 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954115 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954126 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954136 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954147 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954158 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954168 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954179 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954189 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954201 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954212 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954223 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954233 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" 
volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954245 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954256 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954266 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954279 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954291 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954302 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954314 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954326 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954338 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954348 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954360 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954370 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954381 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954395 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954407 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954418 4792 reconstruct.go:97] "Volume reconstruction finished" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.954426 4792 reconciler.go:26] "Reconciler: start to sync state" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.964162 4792 manager.go:324] Recovery completed Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.973548 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.978448 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.978538 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.978567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.980734 4792 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.980755 4792 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 29 18:56:28 crc kubenswrapper[4792]: I0929 18:56:28.980774 4792 state_mem.go:36] "Initialized new in-memory state store" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.006114 4792 policy_none.go:49] "None policy: Start" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.007090 4792 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.007117 4792 state_mem.go:35] "Initializing new in-memory state store" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.009043 4792 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.014013 4792 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.014117 4792 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.014140 4792 kubelet.go:2335] "Starting kubelet main sync loop" Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.014465 4792 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 29 18:56:29 crc kubenswrapper[4792]: W0929 18:56:29.016922 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.016990 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.115:6443: connect: connection refused" logger="UnhandledError" Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.040291 4792 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.056827 4792 manager.go:334] "Starting Device Plugin manager" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.056903 4792 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.056920 4792 server.go:79] "Starting device plugin registration server" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.057384 4792 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.057408 4792 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.057658 4792 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.057768 4792 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.057782 4792 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.065554 4792 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.114912 4792 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.115022 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.116050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.116108 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.116126 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.116313 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.116436 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.116477 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.117124 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.117159 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.117170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.118082 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.118105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.118116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.118226 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.118310 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.118334 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.120457 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.120481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.120509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.120485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.120557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.120520 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.120747 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.120969 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.121014 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.121525 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.121554 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.121566 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.121674 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.121893 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.121927 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122232 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122242 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122456 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122465 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122592 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122631 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122664 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122689 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.122699 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.124747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.124779 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.124790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.142319 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.115:6443: connect: connection refused" interval="400ms" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.158544 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.158603 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 18:56:29 crc 
kubenswrapper[4792]: I0929 18:56:29.158627 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.158643 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.158659 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.158770 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.158909 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.158974 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159030 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159052 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159125 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 
18:56:29.159189 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159221 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159244 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159271 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159515 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159523 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.159541 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.159888 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.115:6443: connect: connection refused" node="crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.260966 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261004 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 18:56:29 crc 
kubenswrapper[4792]: I0929 18:56:29.261018 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261034 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261054 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261067 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261080 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261093 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261106 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261120 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261133 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261145 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261157 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261177 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261190 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261534 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261576 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261592 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261620 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261651 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261653 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261667 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261693 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261692 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261715 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261722 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261744 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261748 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261617 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.261763 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.360240 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.361986 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.362014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.362024 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.362044 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.362233 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.115:6443: connect: connection refused" node="crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.463219 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.488438 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.508096 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: W0929 18:56:29.523682 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-68ca80d2355d9cc7a88659e77ac69e628f9c33c474fe8f83642f9c27383a247a WatchSource:0}: Error finding container 68ca80d2355d9cc7a88659e77ac69e628f9c33c474fe8f83642f9c27383a247a: Status 404 returned error can't find the container with id 68ca80d2355d9cc7a88659e77ac69e628f9c33c474fe8f83642f9c27383a247a
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.527174 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.531780 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.542974 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.115:6443: connect: connection refused" interval="800ms"
Sep 29 18:56:29 crc kubenswrapper[4792]: W0929 18:56:29.564167 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-48cdbcb0919251c225477e2972a4cb19052d6ec983ced10d75e62d09bbbc35ae WatchSource:0}: Error finding container 48cdbcb0919251c225477e2972a4cb19052d6ec983ced10d75e62d09bbbc35ae: Status 404 returned error can't find the container with id 48cdbcb0919251c225477e2972a4cb19052d6ec983ced10d75e62d09bbbc35ae
Sep 29 18:56:29 crc kubenswrapper[4792]: W0929 18:56:29.749413 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused
Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.749491 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.115:6443: connect: connection refused" logger="UnhandledError"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.762979 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.764229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.764260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.764268 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.764307 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.764631 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.115:6443: connect: connection refused" node="crc"
Sep 29 18:56:29 crc kubenswrapper[4792]: W0929 18:56:29.774624 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused
Sep 29 18:56:29 crc kubenswrapper[4792]: E0929 18:56:29.774692 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.115:6443: connect: connection refused" logger="UnhandledError"
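Both "Attempting to register node" tries above fail with connection refused because the API server the kubelet is posting to is itself one of the static pods it is still starting, so the kubelet simply keeps retrying. The lease controller's retry interval is visible doubling from 800ms here to 1.6s and 3.2s further down. A minimal sketch of that retry shape, assuming a plain HTTP client and an arbitrary cap; the real kubelet goes through client-go with its TLS client certificates:

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        interval := 800 * time.Millisecond  // first retry interval seen in the log
        const maxInterval = 7 * time.Second // cap is an assumption, not from the log
        for {
            // The kubelet registers itself by POSTing a Node object here.
            resp, err := http.Post("https://api-int.crc.testing:6443/api/v1/nodes", "application/json", nil)
            if err == nil {
                resp.Body.Close()
                fmt.Println("node registered")
                return
            }
            fmt.Printf("register failed (%v), retrying in %v\n", err, interval)
            time.Sleep(interval)
            if interval *= 2; interval > maxInterval {
                interval = maxInterval
            }
        }
    }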
Sep 29 18:56:29 crc kubenswrapper[4792]: I0929 18:56:29.926620 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.021686 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"48cdbcb0919251c225477e2972a4cb19052d6ec983ced10d75e62d09bbbc35ae"}
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.022966 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b43d296e6380fe43d4eaff16abdfa67bb345b1b89e7be3468e14348aa86d8de7"}
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.023954 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"68ca80d2355d9cc7a88659e77ac69e628f9c33c474fe8f83642f9c27383a247a"}
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.024921 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a25a837c44536528287abdf8df08fd83fb7785371097b86c8957b2a64ebe4ceb"}
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.025790 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f4e16da686167a71ef228050a26a6533c325ecabbf755f7c5e441b6ae3852781"}
Sep 29 18:56:30 crc kubenswrapper[4792]: W0929 18:56:30.273332 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused
Sep 29 18:56:30 crc kubenswrapper[4792]: E0929 18:56:30.273437 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.115:6443: connect: connection refused" logger="UnhandledError"
Sep 29 18:56:30 crc kubenswrapper[4792]: E0929 18:56:30.344500 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.115:6443: connect: connection refused" interval="1.6s"
Sep 29 18:56:30 crc kubenswrapper[4792]: W0929 18:56:30.496831 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused
Sep 29 18:56:30 crc kubenswrapper[4792]: E0929 18:56:30.496934 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.115:6443: connect: connection refused" logger="UnhandledError"
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.565172 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.566294 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.566339 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.566348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.566368 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 29 18:56:30 crc kubenswrapper[4792]: E0929 18:56:30.566836 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.115:6443: connect: connection refused" node="crc"
Sep 29 18:56:30 crc kubenswrapper[4792]: I0929 18:56:30.925768 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.029661 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6"}
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.029736 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad"}
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.031397 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959" exitCode=0
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.031503 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.031516 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959"}
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.032627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.032674 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.032692 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.033555 4792 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a82d86bbf2e42109faae5ede8c5a9f4401fac3fadde739edd080884746cbd49e" exitCode=0
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.033605 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a82d86bbf2e42109faae5ede8c5a9f4401fac3fadde739edd080884746cbd49e"}
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.033663 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.034538 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.034560 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.034568 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.034580 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.035628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.035663 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.035681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.035749 4792 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="e40f49e59c0de46323cca5354482ec44b1344547bbdcfb8d4d49008881cd6f0b" exitCode=0
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.035783 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"e40f49e59c0de46323cca5354482ec44b1344547bbdcfb8d4d49008881cd6f0b"}
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.035838 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.036650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.036670 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.036678 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.037866 4792 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76" exitCode=0
Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.037886 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76"} Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.037938 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.038588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.038608 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.038616 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:31 crc kubenswrapper[4792]: E0929 18:56:31.511494 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.115:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869d5c90b7854f5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-29 18:56:28.922467573 +0000 UTC m=+0.915774989,LastTimestamp:2025-09-29 18:56:28.922467573 +0000 UTC m=+0.915774989,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Sep 29 18:56:31 crc kubenswrapper[4792]: I0929 18:56:31.926072 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused Sep 29 18:56:31 crc kubenswrapper[4792]: E0929 18:56:31.945242 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.115:6443: connect: connection refused" interval="3.2s" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.043292 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3"} Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.043334 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd"} Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.043358 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.045042 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.045067 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 
18:56:32.045077 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.047442 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27"} Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.047477 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538"} Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.047486 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9"} Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.050018 4792 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9d616dac5636335adf023c822bba36cdf312e347e77552e07e4b66422c5d5e99" exitCode=0 Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.050088 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9d616dac5636335adf023c822bba36cdf312e347e77552e07e4b66422c5d5e99"} Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.050152 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.050948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.050978 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.050987 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.052481 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"4a5236f5aba9d9427a155b81c2896961ef195180a5eec6aa80d70ba105238fb8"} Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.052573 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.053209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.053228 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.053235 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.055470 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c"} Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.055496 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d"} Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.055506 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8"} Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.055550 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.058420 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.058440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.058449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.099573 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.167374 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.168818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.168840 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.168864 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:32 crc kubenswrapper[4792]: I0929 18:56:32.168884 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 18:56:32 crc kubenswrapper[4792]: E0929 18:56:32.169171 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.115:6443: connect: connection refused" node="crc" Sep 29 18:56:32 crc kubenswrapper[4792]: W0929 18:56:32.347629 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused Sep 29 18:56:32 crc kubenswrapper[4792]: E0929 18:56:32.347706 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.115:6443: connect: connection refused" logger="UnhandledError" Sep 29 18:56:32 crc kubenswrapper[4792]: W0929 18:56:32.606163 4792 
reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.115:6443: connect: connection refused Sep 29 18:56:32 crc kubenswrapper[4792]: E0929 18:56:32.606275 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.115:6443: connect: connection refused" logger="UnhandledError" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.061606 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e509f16cf3e8d080009f208150d00750d6be0f0929bfacddddf766f9eeb7f0a2"} Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.061672 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433"} Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.061677 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.063098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.063247 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.063371 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.065529 4792 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a30a2374420b58a38d1b619b63b049a8560db843ede7e64fb7cd0b2ca276edc9" exitCode=0 Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.065596 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.065687 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.065911 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a30a2374420b58a38d1b619b63b049a8560db843ede7e64fb7cd0b2ca276edc9"} Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.065965 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066216 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066638 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:33 crc 
kubenswrapper[4792]: I0929 18:56:33.066657 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066709 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066776 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066791 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.066666 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:33 crc kubenswrapper[4792]: I0929 18:56:33.725315 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.071286 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"24b7729bcfb1fa9218287d13a92880bd4fa05837c64712fb00ede2f4b4d635ff"} Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.071334 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b8cf6951d7dcfab678b6562ddf04934a1a524f70515f54aec8fcfd9750eb003a"} Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.071349 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2a0ea04c23229fcb69912b827cbfcabec8a2970c0f2c4e05be55002e6c96974d"} Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.071336 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.071376 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.071396 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.072211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.072238 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.072267 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.072504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.072532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:34 crc kubenswrapper[4792]: I0929 18:56:34.072542 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.008494 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.078602 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.079007 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"57be197b36518b67a2b6084083ebf3e3193ce566b66367ea7313f4610f54f281"} Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.079062 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2a25d09a01e28aedb304e438d1a82e96016c799670f7e37b15d26b95e1e03ecd"} Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.079112 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.079683 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.079716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.079728 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.080169 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.080207 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.080222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.172924 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.173107 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.174449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.174486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.174498 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.199432 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.369988 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.371715 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.371768 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.371779 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:35 crc kubenswrapper[4792]: I0929 18:56:35.371805 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.081987 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.082157 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.082196 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.082332 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.084321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.084354 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.084384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.084392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.084414 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.084434 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.084879 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.084925 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.084945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:36 crc kubenswrapper[4792]: I0929 18:56:36.583476 4792 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.084542 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.084636 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.086053 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.086085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.086112 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.086337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.086402 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.086427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.381798 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.381994 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.383359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.383404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:37 crc kubenswrapper[4792]: I0929 18:56:37.383422 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:39 crc kubenswrapper[4792]: E0929 18:56:39.065713 4792 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 18:56:39 crc kubenswrapper[4792]: I0929 18:56:39.734955 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:39 crc kubenswrapper[4792]: I0929 18:56:39.735170 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:39 crc kubenswrapper[4792]: I0929 18:56:39.736669 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:39 crc kubenswrapper[4792]: I0929 18:56:39.736912 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:39 crc kubenswrapper[4792]: I0929 18:56:39.736942 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:39 crc kubenswrapper[4792]: I0929 18:56:39.754716 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:40 crc kubenswrapper[4792]: I0929 18:56:40.091760 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:40 crc kubenswrapper[4792]: I0929 18:56:40.093083 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:40 crc kubenswrapper[4792]: I0929 18:56:40.093183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:40 crc kubenswrapper[4792]: I0929 18:56:40.093210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:40 crc kubenswrapper[4792]: I0929 18:56:40.095628 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:41 crc kubenswrapper[4792]: I0929 18:56:41.093978 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:41 crc kubenswrapper[4792]: I0929 18:56:41.095551 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:41 crc kubenswrapper[4792]: I0929 18:56:41.095626 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:41 crc kubenswrapper[4792]: I0929 18:56:41.095662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:42 crc kubenswrapper[4792]: I0929 18:56:42.755569 4792 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 18:56:42 crc kubenswrapper[4792]: I0929 18:56:42.755645 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 18:56:42 crc kubenswrapper[4792]: I0929 18:56:42.926498 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Sep 29 18:56:42 crc kubenswrapper[4792]: W0929 18:56:42.932618 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 29 18:56:42 crc kubenswrapper[4792]: I0929 18:56:42.932697 4792 trace.go:236] Trace[817593232]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 18:56:32.931) (total time: 10000ms): Sep 29 18:56:42 crc kubenswrapper[4792]: Trace[817593232]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (18:56:42.932) Sep 29 
18:56:42 crc kubenswrapper[4792]: Trace[817593232]: [10.000787561s] [10.000787561s] END Sep 29 18:56:42 crc kubenswrapper[4792]: E0929 18:56:42.932726 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 29 18:56:43 crc kubenswrapper[4792]: W0929 18:56:43.064545 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.064641 4792 trace.go:236] Trace[2046356834]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 18:56:33.063) (total time: 10001ms): Sep 29 18:56:43 crc kubenswrapper[4792]: Trace[2046356834]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (18:56:43.064) Sep 29 18:56:43 crc kubenswrapper[4792]: Trace[2046356834]: [10.001534209s] [10.001534209s] END Sep 29 18:56:43 crc kubenswrapper[4792]: E0929 18:56:43.064667 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.260579 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.260890 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.262173 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.262204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.262218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.359699 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.421192 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:44072->192.168.126.11:17697: read: connection reset by peer" start-of-body= Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.421267 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:44072->192.168.126.11:17697: read: 
connection reset by peer" Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.545762 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.545818 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.553951 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 18:56:43 crc kubenswrapper[4792]: I0929 18:56:43.554006 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.101624 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.103183 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e509f16cf3e8d080009f208150d00750d6be0f0929bfacddddf766f9eeb7f0a2" exitCode=255 Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.103258 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e509f16cf3e8d080009f208150d00750d6be0f0929bfacddddf766f9eeb7f0a2"} Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.103363 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.103432 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.104479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.104518 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.104532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.104492 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.104588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.104599 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.105234 4792 scope.go:117] "RemoveContainer" containerID="e509f16cf3e8d080009f208150d00750d6be0f0929bfacddddf766f9eeb7f0a2" Sep 29 18:56:44 crc kubenswrapper[4792]: I0929 18:56:44.130255 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Sep 29 18:56:45 crc kubenswrapper[4792]: I0929 18:56:45.107668 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 18:56:45 crc kubenswrapper[4792]: I0929 18:56:45.110699 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f"} Sep 29 18:56:45 crc kubenswrapper[4792]: I0929 18:56:45.110745 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:45 crc kubenswrapper[4792]: I0929 18:56:45.111004 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:45 crc kubenswrapper[4792]: I0929 18:56:45.111456 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:45 crc kubenswrapper[4792]: I0929 18:56:45.111481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:45 crc kubenswrapper[4792]: I0929 18:56:45.111491 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:45 crc kubenswrapper[4792]: I0929 18:56:45.112455 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:45 crc kubenswrapper[4792]: I0929 18:56:45.112506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:45 crc kubenswrapper[4792]: I0929 18:56:45.112525 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.114724 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.115641 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.117079 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f" exitCode=255 Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.117118 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f"} Sep 29 18:56:46 crc 
Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.117252 4792 scope.go:117] "RemoveContainer" containerID="e509f16cf3e8d080009f208150d00750d6be0f0929bfacddddf766f9eeb7f0a2"
Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.117293 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.118102 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.118125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.118182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.118598 4792 scope.go:117] "RemoveContainer" containerID="0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f"
Sep 29 18:56:46 crc kubenswrapper[4792]: E0929 18:56:46.118745 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Sep 29 18:56:46 crc kubenswrapper[4792]: I0929 18:56:46.853984 4792 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Sep 29 18:56:47 crc kubenswrapper[4792]: I0929 18:56:47.121330 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Sep 29 18:56:47 crc kubenswrapper[4792]: I0929 18:56:47.269354 4792 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Sep 29 18:56:47 crc kubenswrapper[4792]: I0929 18:56:47.388014 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 18:56:47 crc kubenswrapper[4792]: I0929 18:56:47.388154 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 18:56:47 crc kubenswrapper[4792]: I0929 18:56:47.389328 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:47 crc kubenswrapper[4792]: I0929 18:56:47.389372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:47 crc kubenswrapper[4792]: I0929 18:56:47.389384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:47 crc kubenswrapper[4792]: I0929 18:56:47.389974 4792 scope.go:117] "RemoveContainer" containerID="0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f"
Sep 29 18:56:47 crc kubenswrapper[4792]: E0929 18:56:47.390165 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc"
podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 18:56:47 crc kubenswrapper[4792]: I0929 18:56:47.392785 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.126348 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.127271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.127303 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.127322 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.127808 4792 scope.go:117] "RemoveContainer" containerID="0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f" Sep 29 18:56:48 crc kubenswrapper[4792]: E0929 18:56:48.128044 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 18:56:48 crc kubenswrapper[4792]: E0929 18:56:48.511889 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.513322 4792 trace.go:236] Trace[1846393437]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 18:56:36.442) (total time: 12070ms): Sep 29 18:56:48 crc kubenswrapper[4792]: Trace[1846393437]: ---"Objects listed" error: 12070ms (18:56:48.513) Sep 29 18:56:48 crc kubenswrapper[4792]: Trace[1846393437]: [12.070846657s] [12.070846657s] END Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.513551 4792 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.513390 4792 trace.go:236] Trace[336334732]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 18:56:36.032) (total time: 12480ms): Sep 29 18:56:48 crc kubenswrapper[4792]: Trace[336334732]: ---"Objects listed" error: 12480ms (18:56:48.513) Sep 29 18:56:48 crc kubenswrapper[4792]: Trace[336334732]: [12.480733374s] [12.480733374s] END Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.513713 4792 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.515831 4792 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Sep 29 18:56:48 crc kubenswrapper[4792]: E0929 18:56:48.528254 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.916314 4792 apiserver.go:52] "Watching apiserver" Sep 29 18:56:48 
crc kubenswrapper[4792]: I0929 18:56:48.919424 4792 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.919714 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.920072 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:56:48 crc kubenswrapper[4792]: E0929 18:56:48.920180 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.920405 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:56:48 crc kubenswrapper[4792]: E0929 18:56:48.920492 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.920086 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.920508 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.920561 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.920680 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 18:56:48 crc kubenswrapper[4792]: E0929 18:56:48.920782 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.922485 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.922497 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.922788 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.923266 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.923644 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.924364 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.924562 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.924641 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.925930 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.936812 4792 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.954680 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
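Every status patch in this stretch fails the same way: the API server cannot reach the network-node-identity admission webhook on 127.0.0.1:9743 because that pod has not started yet, so "connection refused" bubbles back as an Internal error on each PATCH. A hedged diagnostic sketch that reproduces just the connectivity check (the endpoint and timeout are taken from the log; the empty request body only serves to distinguish a refused connection from an HTTP-level admission error):

// webhook_check.go — reproduces the failing call path above by POSTing to
// the webhook endpoint the API server is trying to reach.
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"strings"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 10 * time.Second, // matches the ?timeout=10s in the log
		Transport: &http.Transport{
			// The webhook serves a cluster-internal cert; skip verification
			// for this diagnostic only.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Post("https://127.0.0.1:9743/pod?timeout=10s",
		"application/json", strings.NewReader("{}"))
	if err != nil {
		fmt.Println("webhook unreachable:", err) // expected while it is down
		return
	}
	defer resp.Body.Close()
	fmt.Println("webhook answered:", resp.Status)
}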
Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.963279 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.971873 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.982210 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:48 crc kubenswrapper[4792]: I0929 18:56:48.994730 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.009474 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.019501 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020207 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020234 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020251 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020266 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020281 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020315 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 
18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020330 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020345 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020340 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020475 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020383 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020551 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020581 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020615 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020648 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020681 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020706 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020710 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020730 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020753 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020781 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020811 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020836 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020882 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020908 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020919 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020937 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020967 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020989 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.020996 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021030 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021066 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021092 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021118 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021139 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021164 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021186 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021212 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021223 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021240 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021270 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021282 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021294 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021336 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021361 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021371 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021398 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021456 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021482 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021503 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021519 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021526 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021530 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021569 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021595 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021618 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021640 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021663 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021686 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021709 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021734 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021738 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021757 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021780 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021804 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021825 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021863 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021887 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021910 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021932 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021943 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021954 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021977 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.021999 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022020 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022040 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022062 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022084 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022094 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022104 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022139 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022163 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022191 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022227 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022258 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022269 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022281 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022303 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022328 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022353 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022385 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022413 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022497 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022522 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022543 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022572 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod 
\"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022605 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022633 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022659 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022682 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022741 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022777 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022809 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022841 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022894 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022916 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022937 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022958 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022980 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023004 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023027 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023048 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023075 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023108 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023133 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023158 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023181 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023203 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023226 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023252 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023415 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023440 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023462 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023484 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023506 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023538 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023576 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023599 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023624 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023648 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023681 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.025139 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.025377 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.025432 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.025471 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.025502 4792 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022303 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022430 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022440 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022477 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022731 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022824 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022890 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.022996 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.027154 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023058 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023069 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023212 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023219 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023373 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023466 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023518 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023650 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.023670 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.025175 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.025764 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.026121 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.026412 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.026554 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.026666 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.027088 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.028116 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.026887 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.030921 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.031500 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.031663 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032122 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032178 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032184 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032342 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032492 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032499 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.032543 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:56:49.532520028 +0000 UTC m=+21.525827474 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032539 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032661 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032796 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032804 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032825 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032812 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.032908 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033028 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033172 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033277 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033195 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033125 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033471 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033511 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.029434 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033711 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033786 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). 
InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033921 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033950 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033979 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034016 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034162 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034177 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034381 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034384 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034432 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.033469 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035821 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035842 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035872 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035889 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035910 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035929 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035944 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035959 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035974 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035989 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036003 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036018 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036035 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036052 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036066 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036081 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036097 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036113 4792 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036129 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036145 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036160 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036175 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036197 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036224 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036249 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036271 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036291 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 18:56:49 crc kubenswrapper[4792]: 
I0929 18:56:49.036307 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036324 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036340 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036356 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036372 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036398 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036414 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036430 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036444 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036461 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 
29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036477 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036492 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036508 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036524 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036541 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036556 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036577 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036607 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036629 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036655 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: 
\"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036698 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036714 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036731 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036748 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036764 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036779 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036794 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036810 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036825 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036841 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod 
\"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036891 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036907 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036923 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036943 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036959 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036975 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036990 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037007 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037023 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037039 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037248 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037278 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037304 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037322 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037339 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037354 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034560 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034588 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034675 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034714 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034732 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037398 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037419 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037439 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037460 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037481 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037501 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037523 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037546 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037570 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037591 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037612 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037639 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037661 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037687 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037763 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037775 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037786 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037797 4792 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037807 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037816 4792 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037825 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037833 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037860 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037871 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037883 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037896 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc 
kubenswrapper[4792]: I0929 18:56:49.037905 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037914 4792 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037924 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037934 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037962 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037973 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037983 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037992 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038000 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038011 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038020 4792 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038029 4792 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038038 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038048 4792 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038057 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038067 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038077 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038087 4792 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038098 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038106 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038116 4792 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038125 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038133 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038142 4792 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038152 4792 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038161 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on 
node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038170 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038179 4792 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038188 4792 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038197 4792 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038206 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038215 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038226 4792 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038235 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038244 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038253 4792 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038262 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038271 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038280 4792 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 
18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038288 4792 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038298 4792 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038308 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038316 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038325 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038336 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038347 4792 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038359 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038371 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038382 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038392 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038402 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038412 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 
18:56:49.038421 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038430 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038439 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038447 4792 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038456 4792 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038465 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038473 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038482 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038491 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038501 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038509 4792 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038519 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038529 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038540 4792 reconciler_common.go:293] "Volume 
detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038553 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038563 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038572 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034904 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.034987 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035063 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035325 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035396 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.041569 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.041749 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.041933 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.042110 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.042247 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.042305 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035638 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035643 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035660 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035683 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036246 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036338 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036491 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036514 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036736 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036816 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.036880 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037347 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037422 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037452 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.037632 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038037 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038044 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038132 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038411 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038510 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038551 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038840 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038872 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.038915 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.039179 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.039421 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.039528 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.039557 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.040200 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.040300 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.040572 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.040563 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.040628 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.040826 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.040992 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.041011 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.041052 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.041222 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.041416 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.044210 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.044434 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.041514 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.042399 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.044783 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.044887 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:49.544638426 +0000 UTC m=+21.537945902 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.045375 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.045652 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.045842 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.045904 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.045996 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.035524 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.042729 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.043030 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.043259 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.043300 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.043362 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.043631 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.043713 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.046548 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:49.546533572 +0000 UTC m=+21.539840968 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.043788 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.044182 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.041494 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.044479 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.046881 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.047226 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.047390 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.047568 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.047662 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.047742 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.047436 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.048075 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.042340 4792 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.048412 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.048429 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.043829 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.048731 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.043618 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.049240 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.049381 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.042644 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.049556 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.049631 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.049895 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.050242 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.050324 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.050353 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.050421 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.050461 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.050899 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.051506 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.052857 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.053006 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.053230 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.053462 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.053632 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.031139 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.053698 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.053818 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.054018 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.057130 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.057198 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.057421 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.057610 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.057725 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.058568 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.058591 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.058607 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.058716 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:49.55864647 +0000 UTC m=+21.551953956 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.060157 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.062451 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.063012 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.065883 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.065927 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.072680 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.072711 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.072723 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.072795 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:49.572754957 +0000 UTC m=+21.566062353 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.072816 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.076698 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.078335 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.082813 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.084813 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.088130 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.088799 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.095246 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.098268 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.104761 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.108130 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.117694 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139666 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139703 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139756 4792 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139766 4792 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139776 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139784 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139792 4792 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139801 4792 
reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139808 4792 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139816 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139806 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139825 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139897 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139908 4792 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139922 4792 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139932 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139945 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139954 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139964 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139974 4792 reconciler_common.go:293] "Volume detached for 
volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139983 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.139993 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140002 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140013 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140022 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140031 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140040 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140048 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140057 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140066 4792 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140074 4792 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140082 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140091 4792 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140099 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140107 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140115 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140123 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140132 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140139 4792 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140147 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140157 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140165 4792 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140173 4792 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140181 4792 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140189 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140198 4792 reconciler_common.go:293] 
"Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140206 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140214 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140222 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140230 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140237 4792 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140253 4792 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140261 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140269 4792 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140277 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140285 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140294 4792 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140301 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140310 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140319 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140329 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140337 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140346 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140354 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140365 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140373 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140382 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140390 4792 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140400 4792 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140408 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140416 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140424 
4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140432 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140439 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140449 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140457 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140465 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140474 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140484 4792 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140491 4792 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140500 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140509 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140517 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140525 4792 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140533 4792 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140541 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140549 4792 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140557 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140565 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140574 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140582 4792 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140590 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140599 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140607 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140615 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140623 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140631 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140638 4792 
reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140645 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140653 4792 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140661 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140669 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140677 4792 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140685 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140693 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140705 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140713 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140721 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140728 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140737 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140745 4792 
reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140752 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140761 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140769 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140777 4792 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140785 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.140793 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.234001 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.242657 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.249624 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 18:56:49 crc kubenswrapper[4792]: W0929 18:56:49.255978 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-3fa4d787961f1aefd0d6a11277a366af1dab433102cf5d979b73a9a5ac90b6fe WatchSource:0}: Error finding container 3fa4d787961f1aefd0d6a11277a366af1dab433102cf5d979b73a9a5ac90b6fe: Status 404 returned error can't find the container with id 3fa4d787961f1aefd0d6a11277a366af1dab433102cf5d979b73a9a5ac90b6fe Sep 29 18:56:49 crc kubenswrapper[4792]: W0929 18:56:49.257826 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-f1f8cfe04f6c6ba8b24ac31b69f550b0faef40f4de3d4fc6cab3067cb9c4a7e0 WatchSource:0}: Error finding container f1f8cfe04f6c6ba8b24ac31b69f550b0faef40f4de3d4fc6cab3067cb9c4a7e0: Status 404 returned error can't find the container with id f1f8cfe04f6c6ba8b24ac31b69f550b0faef40f4de3d4fc6cab3067cb9c4a7e0 Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.544663 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.544724 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.544828 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.544886 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:50.54487246 +0000 UTC m=+22.538179856 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.544984 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:56:50.544972392 +0000 UTC m=+22.538279788 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.645634 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.645676 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.645697 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.645773 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.645823 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.645843 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.645872 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.645829 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:50.645811403 +0000 UTC m=+22.639118799 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.645824 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.645949 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:50.645895435 +0000 UTC m=+22.639202841 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.645961 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.645975 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:49 crc kubenswrapper[4792]: E0929 18:56:49.646015 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:50.646002008 +0000 UTC m=+22.639309424 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.760426 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.765381 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.771636 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.772632 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.784245 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.794409 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.807190 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.815480 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.824007 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.834244 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cer
t-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.843389 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.853892 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.862053 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.874616 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.898006 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:49 crc kubenswrapper[4792]: I0929 18:56:49.909586 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.132676 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"3fa4d787961f1aefd0d6a11277a366af1dab433102cf5d979b73a9a5ac90b6fe"} Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.134176 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba"} Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.134238 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f1f8cfe04f6c6ba8b24ac31b69f550b0faef40f4de3d4fc6cab3067cb9c4a7e0"} Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.135675 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be"} Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.135700 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303"} Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.135711 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"954e37688f297504d616cf0db00d82c8201f999349afed1a08bc24e0bdca9054"} Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.145829 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.155361 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.163709 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.170992 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.181600 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:50Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.193536 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:50Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.203866 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:50Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.213069 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:50Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.223146 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:50Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.234203 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:50Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.244292 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:50Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.256328 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:50Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.266943 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:50Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.278462 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:50Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.551949 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.552031 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.552167 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.552223 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:52.552206142 +0000 UTC m=+24.545513538 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.552456 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:56:52.552439028 +0000 UTC m=+24.545746444 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.652702 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.652762 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:56:50 crc kubenswrapper[4792]: I0929 18:56:50.652796 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.652891 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.652937 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.652973 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.652985 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.652939 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.653034 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.653041 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.652953 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:52.65293516 +0000 UTC m=+24.646242556 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.653079 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:52.653062783 +0000 UTC m=+24.646370179 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:50 crc kubenswrapper[4792]: E0929 18:56:50.653092 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:52.653084984 +0000 UTC m=+24.646392380 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.015159 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.015160 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.015168 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:51 crc kubenswrapper[4792]: E0929 18:56:51.015498 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:56:51 crc kubenswrapper[4792]: E0929 18:56:51.015574 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:56:51 crc kubenswrapper[4792]: E0929 18:56:51.015650 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.018861 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.019586 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.020510 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.021248 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.021968 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.022448 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.023174 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.023835 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.024625 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.025277 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.025794 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.026479 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.027066 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.027550 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.029546 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.030284 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.030954 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.031511 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.032208 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.032904 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.033489 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.035082 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.035621 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.036382 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" 
path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.036916 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.037609 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.038389 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.039115 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.039830 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.040618 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.041105 4792 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.041203 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.042682 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.043182 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.043561 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.044944 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.048563 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.049317 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" 
path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.050395 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.051117 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.052030 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.052635 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.053756 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.054644 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.055099 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.055585 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.056048 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.056756 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.057214 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.057658 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.058150 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.058718 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.059446 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 29 18:56:51 crc kubenswrapper[4792]: I0929 18:56:51.060009 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.143707 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c"} Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.165006 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.182019 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.196978 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.214757 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.231256 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.249606 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.263672 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.567291 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.567358 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.567428 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:56:56.56741022 +0000 UTC m=+28.560717616 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.567467 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.567513 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:56.567501332 +0000 UTC m=+28.560808728 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.668244 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.668539 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.668789 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.668384 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.669075 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:56.669051742 +0000 UTC m=+28.662359158 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.668747 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.669307 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.669411 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.669531 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:56.669520224 +0000 UTC m=+28.662827630 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.668950 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.669731 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.669810 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 18:56:52 crc kubenswrapper[4792]: E0929 18:56:52.669942 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 18:56:56.669929985 +0000 UTC m=+28.663237391 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.813913 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-c228l"]
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.814430 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-c228l"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.820571 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.820748 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.820839 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.840054 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.865919 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.870180 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mz5l\" (UniqueName: \"kubernetes.io/projected/fc16dcda-372e-4aac-8c12-148bf93e8783-kube-api-access-6mz5l\") pod \"node-resolver-c228l\" (UID: \"fc16dcda-372e-4aac-8c12-148bf93e8783\") " pod="openshift-dns/node-resolver-c228l"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.870283 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/fc16dcda-372e-4aac-8c12-148bf93e8783-hosts-file\") pod \"node-resolver-c228l\" (UID: \"fc16dcda-372e-4aac-8c12-148bf93e8783\") " pod="openshift-dns/node-resolver-c228l"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.879215 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.899984 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.966366 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.971179 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mz5l\" (UniqueName: \"kubernetes.io/projected/fc16dcda-372e-4aac-8c12-148bf93e8783-kube-api-access-6mz5l\") pod \"node-resolver-c228l\" (UID: \"fc16dcda-372e-4aac-8c12-148bf93e8783\") " pod="openshift-dns/node-resolver-c228l"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.971238 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/fc16dcda-372e-4aac-8c12-148bf93e8783-hosts-file\") pod \"node-resolver-c228l\" (UID: \"fc16dcda-372e-4aac-8c12-148bf93e8783\") " pod="openshift-dns/node-resolver-c228l"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.971308 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/fc16dcda-372e-4aac-8c12-148bf93e8783-hosts-file\") pod \"node-resolver-c228l\" (UID: \"fc16dcda-372e-4aac-8c12-148bf93e8783\") " pod="openshift-dns/node-resolver-c228l"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.990509 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mz5l\" (UniqueName: \"kubernetes.io/projected/fc16dcda-372e-4aac-8c12-148bf93e8783-kube-api-access-6mz5l\") pod \"node-resolver-c228l\" (UID: \"fc16dcda-372e-4aac-8c12-148bf93e8783\") " pod="openshift-dns/node-resolver-c228l"
Sep 29 18:56:52 crc kubenswrapper[4792]: I0929 18:56:52.998488 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:52Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.015225 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.015302 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:56:53 crc kubenswrapper[4792]: E0929 18:56:53.015337 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:56:53 crc kubenswrapper[4792]: E0929 18:56:53.015432 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.015514 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:56:53 crc kubenswrapper[4792]: E0929 18:56:53.015574 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.044973 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.062752 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.125111 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.128751 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-c228l"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.150686 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-c228l" event={"ID":"fc16dcda-372e-4aac-8c12-148bf93e8783","Type":"ContainerStarted","Data":"b84273352eee7e27dd10d9d8fe16943cdc2b6a1f6777bd4582246da9afd9a144"}
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.204457 4792 scope.go:117] "RemoveContainer" containerID="0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.204626 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Sep 29 18:56:53 crc kubenswrapper[4792]: E0929 18:56:53.204631 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.726503 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-p5q59"]
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.726821 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-rqbjv"]
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.726942 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-p5q59"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.727884 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hr4cm"]
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.728064 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-rqbjv"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.728841 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.729669 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.729801 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.729930 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.730504 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.732618 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.733144 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.733347 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.733455 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.733574 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.734175 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.734279 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.734383 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.734484 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.734579 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.734669 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.734758 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.734886 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.741371 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-5hwvp"]
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.741699 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.743086 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.753075 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.757300 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.771585 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777260 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-script-lib\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777293 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-system-cni-dir\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777312 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-os-release\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777329 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-node-log\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777345 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-openvswitch\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777363 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-etc-openvswitch\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777380 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ae66548-086e-4ca9-bd6f-281ce46e7557-mcd-auth-proxy-config\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777397 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ms9xw\" (UniqueName: \"kubernetes.io/projected/67c58ee5-e056-4e3e-91ed-a116350f2408-kube-api-access-ms9xw\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777418 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-var-lib-kubelet\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777460 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-multus-conf-dir\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777477 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-kubelet\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777494 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-var-lib-openvswitch\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777511 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-netd\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777527 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-system-cni-dir\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777541 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-ovn\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777580 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777673 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-bin\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777708 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777743 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72xxv\" (UniqueName: \"kubernetes.io/projected/716c5fdd-0e02-4066-9210-93d805b6fe81-kube-api-access-72xxv\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777825 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/716c5fdd-0e02-4066-9210-93d805b6fe81-ovn-node-metrics-cert\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777880 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-systemd\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777898 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-env-overrides\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777914 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-run-k8s-cni-cncf-io\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777929 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-var-lib-cni-bin\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777947 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-var-lib-cni-multus\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777963 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-etc-kubernetes\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777979 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-systemd-units\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.777997 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-cnibin\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778012 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-run-multus-certs\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778026 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-netns\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778043 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/67c58ee5-e056-4e3e-91ed-a116350f2408-cni-binary-copy\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778069 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/100876d3-2539-47f1-91fa-0f91456ccac1-multus-daemon-config\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778089 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-multus-socket-dir-parent\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778110 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-run-netns\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778126 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-hostroot\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778142 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-log-socket\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778159 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdrmh\" (UniqueName: \"kubernetes.io/projected/0ae66548-086e-4ca9-bd6f-281ce46e7557-kube-api-access-kdrmh\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778174 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-os-release\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778190 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfblz\" (UniqueName: \"kubernetes.io/projected/100876d3-2539-47f1-91fa-0f91456ccac1-kube-api-access-mfblz\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778206 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-slash\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778228 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0ae66548-086e-4ca9-bd6f-281ce46e7557-rootfs\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59"
Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778253 4792 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-multus-cni-dir\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778267 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-config\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778282 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-cnibin\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778296 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/67c58ee5-e056-4e3e-91ed-a116350f2408-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778331 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ae66548-086e-4ca9-bd6f-281ce46e7557-proxy-tls\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778351 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-ovn-kubernetes\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.778366 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/100876d3-2539-47f1-91fa-0f91456ccac1-cni-binary-copy\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.786330 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.798508 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.813358 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.824085 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.837003 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.848580 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.863578 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879184 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdrmh\" (UniqueName: \"kubernetes.io/projected/0ae66548-086e-4ca9-bd6f-281ce46e7557-kube-api-access-kdrmh\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879222 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-multus-socket-dir-parent\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879196 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879240 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-run-netns\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879257 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-hostroot\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879273 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-log-socket\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879288 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-os-release\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879312 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfblz\" (UniqueName: \"kubernetes.io/projected/100876d3-2539-47f1-91fa-0f91456ccac1-kube-api-access-mfblz\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879315 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-multus-socket-dir-parent\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879327 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-slash\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879343 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0ae66548-086e-4ca9-bd6f-281ce46e7557-rootfs\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879358 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-multus-cni-dir\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879365 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-log-socket\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879374 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-config\") pod 
\"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879388 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-cnibin\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879393 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-hostroot\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879404 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/67c58ee5-e056-4e3e-91ed-a116350f2408-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879413 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-run-netns\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879420 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ae66548-086e-4ca9-bd6f-281ce46e7557-proxy-tls\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879426 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0ae66548-086e-4ca9-bd6f-281ce46e7557-rootfs\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879436 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/100876d3-2539-47f1-91fa-0f91456ccac1-cni-binary-copy\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879450 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-ovn-kubernetes\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879468 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-os-release\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 
18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879482 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-script-lib\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879498 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-system-cni-dir\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879514 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-openvswitch\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879527 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-node-log\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879543 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-etc-openvswitch\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879559 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ae66548-086e-4ca9-bd6f-281ce46e7557-mcd-auth-proxy-config\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879573 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms9xw\" (UniqueName: \"kubernetes.io/projected/67c58ee5-e056-4e3e-91ed-a116350f2408-kube-api-access-ms9xw\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879575 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-os-release\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879587 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-system-cni-dir\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 
18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879599 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-slash\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879601 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-var-lib-kubelet\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879613 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-openvswitch\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879626 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-var-lib-kubelet\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879641 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-multus-conf-dir\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879651 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-ovn-kubernetes\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879687 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-os-release\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879955 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-system-cni-dir\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879627 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-multus-conf-dir\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879983 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-kubelet\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.879997 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-var-lib-openvswitch\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880010 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-netd\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880034 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-ovn\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880047 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880068 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-bin\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880085 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880102 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72xxv\" (UniqueName: \"kubernetes.io/projected/716c5fdd-0e02-4066-9210-93d805b6fe81-kube-api-access-72xxv\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880117 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/716c5fdd-0e02-4066-9210-93d805b6fe81-ovn-node-metrics-cert\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880134 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" 
(UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-run-k8s-cni-cncf-io\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880147 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-systemd\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880162 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-env-overrides\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880176 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-var-lib-cni-bin\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880189 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-var-lib-cni-multus\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880202 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-etc-kubernetes\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880221 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-systemd-units\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880235 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-cnibin\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880252 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-run-multus-certs\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880267 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-netns\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880284 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/67c58ee5-e056-4e3e-91ed-a116350f2408-cni-binary-copy\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880293 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-multus-cni-dir\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880309 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/100876d3-2539-47f1-91fa-0f91456ccac1-multus-daemon-config\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880351 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ae66548-086e-4ca9-bd6f-281ce46e7557-mcd-auth-proxy-config\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880360 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-cnibin\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880375 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-script-lib\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880392 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-node-log\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880420 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-etc-openvswitch\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880445 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-systemd\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 
18:56:53.880464 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-kubelet\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880483 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-var-lib-openvswitch\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880504 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-netd\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880527 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-ovn\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880785 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-config\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880939 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/67c58ee5-e056-4e3e-91ed-a116350f2408-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880980 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-system-cni-dir\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881015 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-cnibin\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881039 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-var-lib-cni-bin\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881061 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: 
\"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-var-lib-cni-multus\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881085 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-etc-kubernetes\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881108 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-systemd-units\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881100 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-bin\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881131 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-env-overrides\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881142 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-netns\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.880224 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/100876d3-2539-47f1-91fa-0f91456ccac1-cni-binary-copy\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881131 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881167 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-run-multus-certs\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881610 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/67c58ee5-e056-4e3e-91ed-a116350f2408-cni-binary-copy\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " 
pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881626 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/100876d3-2539-47f1-91fa-0f91456ccac1-multus-daemon-config\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881652 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/100876d3-2539-47f1-91fa-0f91456ccac1-host-run-k8s-cni-cncf-io\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.881907 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/67c58ee5-e056-4e3e-91ed-a116350f2408-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.884001 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ae66548-086e-4ca9-bd6f-281ce46e7557-proxy-tls\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.885058 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/716c5fdd-0e02-4066-9210-93d805b6fe81-ovn-node-metrics-cert\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.898549 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72xxv\" (UniqueName: \"kubernetes.io/projected/716c5fdd-0e02-4066-9210-93d805b6fe81-kube-api-access-72xxv\") pod \"ovnkube-node-hr4cm\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.904744 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfblz\" (UniqueName: \"kubernetes.io/projected/100876d3-2539-47f1-91fa-0f91456ccac1-kube-api-access-mfblz\") pod \"multus-5hwvp\" (UID: \"100876d3-2539-47f1-91fa-0f91456ccac1\") " pod="openshift-multus/multus-5hwvp" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.905359 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdrmh\" (UniqueName: \"kubernetes.io/projected/0ae66548-086e-4ca9-bd6f-281ce46e7557-kube-api-access-kdrmh\") pod \"machine-config-daemon-p5q59\" (UID: \"0ae66548-086e-4ca9-bd6f-281ce46e7557\") " pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.905657 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ms9xw\" (UniqueName: \"kubernetes.io/projected/67c58ee5-e056-4e3e-91ed-a116350f2408-kube-api-access-ms9xw\") pod \"multus-additional-cni-plugins-rqbjv\" (UID: \"67c58ee5-e056-4e3e-91ed-a116350f2408\") " 
pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.907076 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.920357 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.930873 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.942515 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.955581 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.966250 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.976930 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:53 crc kubenswrapper[4792]: I0929 18:56:53.997640 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.014874 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.033565 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.044771 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.053470 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.064504 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" Sep 29 18:56:54 crc kubenswrapper[4792]: W0929 18:56:54.065586 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ae66548_086e_4ca9_bd6f_281ce46e7557.slice/crio-60658279d84b305efe7641cc0687ebf982ab078d007659f333b5dc2638afcdb8 WatchSource:0}: Error finding container 60658279d84b305efe7641cc0687ebf982ab078d007659f333b5dc2638afcdb8: Status 404 returned error can't find the container with id 60658279d84b305efe7641cc0687ebf982ab078d007659f333b5dc2638afcdb8 Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.070530 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.072056 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.075930 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-5hwvp"
Sep 29 18:56:54 crc kubenswrapper[4792]: W0929 18:56:54.086708 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67c58ee5_e056_4e3e_91ed_a116350f2408.slice/crio-dc19a69df94d8d36ce11f1fac98dc580d9d514a92ff04ed1d84ef3e97fb404e9 WatchSource:0}: Error finding container dc19a69df94d8d36ce11f1fac98dc580d9d514a92ff04ed1d84ef3e97fb404e9: Status 404 returned error can't find the container with id dc19a69df94d8d36ce11f1fac98dc580d9d514a92ff04ed1d84ef3e97fb404e9
Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.103187 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.153048 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" event={"ID":"67c58ee5-e056-4e3e-91ed-a116350f2408","Type":"ContainerStarted","Data":"dc19a69df94d8d36ce11f1fac98dc580d9d514a92ff04ed1d84ef3e97fb404e9"} Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.158649 4792 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5hwvp" event={"ID":"100876d3-2539-47f1-91fa-0f91456ccac1","Type":"ContainerStarted","Data":"8e31dbbcc2f49d5707ff6bb724596f20aeb5a6e9f81ca592fe99ba83d3397ffa"}
Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.159572 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"cf070e273fcce7f67e8404720b4fc0f985e467b495a02b9f70058a2809243b6f"}
Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.162558 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"60658279d84b305efe7641cc0687ebf982ab078d007659f333b5dc2638afcdb8"}
Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.164159 4792 scope.go:117] "RemoveContainer" containerID="0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f"
Sep 29 18:56:54 crc kubenswrapper[4792]: E0929 18:56:54.164359 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.164883 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-c228l" event={"ID":"fc16dcda-372e-4aac-8c12-148bf93e8783","Type":"ContainerStarted","Data":"af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb"}
Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.179006 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.188505 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.203628 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 
2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.219531 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.234830 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.247998 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.258590 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.276597 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.290956 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.305265 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.316911 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.327984 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.339842 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.928926 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.931372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.931416 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.931425 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.931603 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.937817 4792 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 29 
18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.938135 4792 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.939222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.939248 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.939257 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.939273 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.939284 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:54Z","lastTransitionTime":"2025-09-29T18:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:54 crc kubenswrapper[4792]: E0929 18:56:54.958211 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.961346 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.961382 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.961392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.961406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.961415 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:54Z","lastTransitionTime":"2025-09-29T18:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:54 crc kubenswrapper[4792]: E0929 18:56:54.973302 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.976687 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.976723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.976737 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.976753 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.976763 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:54Z","lastTransitionTime":"2025-09-29T18:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:54 crc kubenswrapper[4792]: E0929 18:56:54.993888 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:54Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.997918 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.997948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.997957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.997970 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:54 crc kubenswrapper[4792]: I0929 18:56:54.997980 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:54Z","lastTransitionTime":"2025-09-29T18:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.009185 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:56:55 crc kubenswrapper[4792]: E0929 18:56:55.010110 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.013528 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.013577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.013586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.013603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.013616 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.014695 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.014695 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:55 crc kubenswrapper[4792]: E0929 18:56:55.014786 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.014816 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:56:55 crc kubenswrapper[4792]: E0929 18:56:55.014913 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:56:55 crc kubenswrapper[4792]: E0929 18:56:55.015086 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:56:55 crc kubenswrapper[4792]: E0929 18:56:55.026125 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: E0929 18:56:55.026313 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.027725 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.027750 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.027759 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.027774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.027784 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.130432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.130464 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.130472 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.130485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.130495 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.166753 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5hwvp" event={"ID":"100876d3-2539-47f1-91fa-0f91456ccac1","Type":"ContainerStarted","Data":"3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.168315 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820" exitCode=0 Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.168373 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.169891 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.169939 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.171658 4792 generic.go:334] "Generic (PLEG): container finished" podID="67c58ee5-e056-4e3e-91ed-a116350f2408" containerID="cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8" exitCode=0 Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.171885 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" event={"ID":"67c58ee5-e056-4e3e-91ed-a116350f2408","Type":"ContainerDied","Data":"cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.172682 4792 scope.go:117] "RemoveContainer" containerID="0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f" Sep 29 18:56:55 crc kubenswrapper[4792]: E0929 18:56:55.172881 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.186355 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.205486 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.219926 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 
2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.232299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.232330 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.232340 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.232357 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.232370 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.242367 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.258402 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.270482 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.284060 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.302787 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.314566 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-
cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.330261 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.334274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.334303 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.334312 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.334326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.334335 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.344081 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.353945 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.367527 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.381405 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.394313 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.410359 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.432041 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.438151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.438185 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.438196 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.438212 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.438224 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.451013 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z 
is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.466646 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.481938 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.493099 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.506897 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.520207 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.533387 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.540558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.540594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.540603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.540618 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.540629 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.545754 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.573270 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/hos
t/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.642220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.642261 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.642275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.642290 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.642303 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.744251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.744288 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.744300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.744316 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.744328 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.846809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.846868 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.846880 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.846899 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.846911 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.949619 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.949968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.949980 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.949997 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.950010 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:55Z","lastTransitionTime":"2025-09-29T18:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.961904 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-4gmtk"] Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.962223 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.964571 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.964567 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.964748 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.964979 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.976068 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:55 crc kubenswrapper[4792]: I0929 18:56:55.989038 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:55Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.000561 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b84b9e91-b50e-4271-bfc8-be15652128c5-serviceca\") pod \"node-ca-4gmtk\" (UID: \"b84b9e91-b50e-4271-bfc8-be15652128c5\") " 
pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.000599 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b84b9e91-b50e-4271-bfc8-be15652128c5-host\") pod \"node-ca-4gmtk\" (UID: \"b84b9e91-b50e-4271-bfc8-be15652128c5\") " pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.000638 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc999\" (UniqueName: \"kubernetes.io/projected/b84b9e91-b50e-4271-bfc8-be15652128c5-kube-api-access-lc999\") pod \"node-ca-4gmtk\" (UID: \"b84b9e91-b50e-4271-bfc8-be15652128c5\") " pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.005672 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z 
is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.018763 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.033022 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb6193
1b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 
18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.045211 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.052035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.052059 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.052067 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.052081 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.052091 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:56Z","lastTransitionTime":"2025-09-29T18:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.057397 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.071993 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.087533 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.101393 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b84b9e91-b50e-4271-bfc8-be15652128c5-host\") pod \"node-ca-4gmtk\" (UID: \"b84b9e91-b50e-4271-bfc8-be15652128c5\") " pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.101464 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc999\" (UniqueName: \"kubernetes.io/projected/b84b9e91-b50e-4271-bfc8-be15652128c5-kube-api-access-lc999\") pod \"node-ca-4gmtk\" (UID: \"b84b9e91-b50e-4271-bfc8-be15652128c5\") " pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.101509 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b84b9e91-b50e-4271-bfc8-be15652128c5-host\") pod \"node-ca-4gmtk\" (UID: \"b84b9e91-b50e-4271-bfc8-be15652128c5\") " pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.101518 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b84b9e91-b50e-4271-bfc8-be15652128c5-serviceca\") pod \"node-ca-4gmtk\" (UID: \"b84b9e91-b50e-4271-bfc8-be15652128c5\") " pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.102521 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.102834 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b84b9e91-b50e-4271-bfc8-be15652128c5-serviceca\") pod \"node-ca-4gmtk\" (UID: \"b84b9e91-b50e-4271-bfc8-be15652128c5\") " pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.121895 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.123294 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc999\" (UniqueName: \"kubernetes.io/projected/b84b9e91-b50e-4271-bfc8-be15652128c5-kube-api-access-lc999\") pod \"node-ca-4gmtk\" (UID: \"b84b9e91-b50e-4271-bfc8-be15652128c5\") " pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.134564 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.146260 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.154833 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.154931 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.154941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.154954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.154962 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:56Z","lastTransitionTime":"2025-09-29T18:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.159418 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.176337 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" event={"ID":"67c58ee5-e056-4e3e-91ed-a116350f2408","Type":"ContainerStarted","Data":"1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.190230 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.190292 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.190307 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.190320 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.190331 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925"} Sep 29 
18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.195270 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.207185 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.219416 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.230098 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.240946 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.254820 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\
":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.256750 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.256783 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.256796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.256812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.256823 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:56Z","lastTransitionTime":"2025-09-29T18:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.268958 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.280387 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.290028 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.319351 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.332143 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-4gmtk" Sep 29 18:56:56 crc kubenswrapper[4792]: W0929 18:56:56.351738 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb84b9e91_b50e_4271_bfc8_be15652128c5.slice/crio-31cbef20af091ecc9f41bc407495a6cfad370ee5e9431390afbe98bc67997fb9 WatchSource:0}: Error finding container 31cbef20af091ecc9f41bc407495a6cfad370ee5e9431390afbe98bc67997fb9: Status 404 returned error can't find the container with id 31cbef20af091ecc9f41bc407495a6cfad370ee5e9431390afbe98bc67997fb9 Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.354069 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.364104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.364140 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.364149 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.364163 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.364173 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:56Z","lastTransitionTime":"2025-09-29T18:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.366693 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.382003 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.394155 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:56:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.467735 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.467760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.467769 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.467781 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.467790 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:56Z","lastTransitionTime":"2025-09-29T18:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.570192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.570231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.570243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.570260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.570271 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:56Z","lastTransitionTime":"2025-09-29T18:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.604644 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.604777 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:57:04.604759436 +0000 UTC m=+36.598066832 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.604915 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.605012 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.605055 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:04.605047093 +0000 UTC m=+36.598354489 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.673676 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.673708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.673716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.673730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.673740 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:56Z","lastTransitionTime":"2025-09-29T18:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.705551 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.705899 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.706000 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.705682 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.706278 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:04.706262964 +0000 UTC m=+36.699570360 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.706672 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.706771 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.706874 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.706976 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:04.706966622 +0000 UTC m=+36.700274018 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.706138 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.707100 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.707158 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:56 crc kubenswrapper[4792]: E0929 18:56:56.707228 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:04.707220509 +0000 UTC m=+36.700527905 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.775575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.775787 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.775880 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.775950 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.776008 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:56Z","lastTransitionTime":"2025-09-29T18:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.878395 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.878426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.878435 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.878449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.878463 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:56Z","lastTransitionTime":"2025-09-29T18:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.981092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.981155 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.981173 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.981197 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:56 crc kubenswrapper[4792]: I0929 18:56:56.981213 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:56Z","lastTransitionTime":"2025-09-29T18:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.014912 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.014944 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.014956 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:56:57 crc kubenswrapper[4792]: E0929 18:56:57.015085 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:56:57 crc kubenswrapper[4792]: E0929 18:56:57.015218 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:56:57 crc kubenswrapper[4792]: E0929 18:56:57.015319 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.084294 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.084337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.084350 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.084367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.084378 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:57Z","lastTransitionTime":"2025-09-29T18:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.187113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.187156 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.187168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.187189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.187207 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:57Z","lastTransitionTime":"2025-09-29T18:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.196362 4792 generic.go:334] "Generic (PLEG): container finished" podID="67c58ee5-e056-4e3e-91ed-a116350f2408" containerID="1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827" exitCode=0 Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.196437 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" event={"ID":"67c58ee5-e056-4e3e-91ed-a116350f2408","Type":"ContainerDied","Data":"1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.198356 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-4gmtk" event={"ID":"b84b9e91-b50e-4271-bfc8-be15652128c5","Type":"ContainerStarted","Data":"b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.198390 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-4gmtk" event={"ID":"b84b9e91-b50e-4271-bfc8-be15652128c5","Type":"ContainerStarted","Data":"31cbef20af091ecc9f41bc407495a6cfad370ee5e9431390afbe98bc67997fb9"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.203100 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.221008 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.238393 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf
5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.250256 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.265837 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.282972 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.290624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.290647 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.290657 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.290672 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.290682 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:57Z","lastTransitionTime":"2025-09-29T18:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.295110 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.310617 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84
eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 
18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.323316 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.335817 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.345463 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.356940 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.368021 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.377250 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.391550 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.393667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.393692 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.393726 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.393755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.393765 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:57Z","lastTransitionTime":"2025-09-29T18:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.406759 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.419475 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.430133 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.446213 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.460090 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.473914 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.486995 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.496280 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.496307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.496314 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.496329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.496337 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:57Z","lastTransitionTime":"2025-09-29T18:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.506002 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z 
is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.521228 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.532491 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.547427 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.558152 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.572662 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.580430 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:57Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.597886 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.597913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.597924 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.597940 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.597950 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:57Z","lastTransitionTime":"2025-09-29T18:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.700580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.700625 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.700640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.700659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.700681 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:57Z","lastTransitionTime":"2025-09-29T18:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.803095 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.803136 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.803149 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.803165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.803180 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:57Z","lastTransitionTime":"2025-09-29T18:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.905604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.905639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.905656 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.905675 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:57 crc kubenswrapper[4792]: I0929 18:56:57.905687 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:57Z","lastTransitionTime":"2025-09-29T18:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.007818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.007865 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.007875 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.007890 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.007901 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:58Z","lastTransitionTime":"2025-09-29T18:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.110722 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.110762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.110777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.110794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.110806 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:58Z","lastTransitionTime":"2025-09-29T18:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.207463 4792 generic.go:334] "Generic (PLEG): container finished" podID="67c58ee5-e056-4e3e-91ed-a116350f2408" containerID="3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb" exitCode=0 Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.207504 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" event={"ID":"67c58ee5-e056-4e3e-91ed-a116350f2408","Type":"ContainerDied","Data":"3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.216212 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.216249 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.216262 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.216285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.216296 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:58Z","lastTransitionTime":"2025-09-29T18:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.230676 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.242720 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.255276 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.273243 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.285857 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k
8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.298082 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.308537 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.317061 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.319385 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.319410 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.319420 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.319436 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.319446 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:58Z","lastTransitionTime":"2025-09-29T18:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.327464 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.337729 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.348197 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.359603 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.372289 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.388111 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:58Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.422252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.422286 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.422295 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.422309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.422318 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:58Z","lastTransitionTime":"2025-09-29T18:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.525184 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.525207 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.525215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.525227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.525236 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:58Z","lastTransitionTime":"2025-09-29T18:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.627069 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.627105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.627116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.627133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.627145 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:58Z","lastTransitionTime":"2025-09-29T18:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.729888 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.729928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.729939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.729959 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.729970 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:58Z","lastTransitionTime":"2025-09-29T18:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.832041 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.832090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.832111 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.832137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.832159 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:58Z","lastTransitionTime":"2025-09-29T18:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.935078 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.935144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.935166 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.935190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:58 crc kubenswrapper[4792]: I0929 18:56:58.935208 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:58Z","lastTransitionTime":"2025-09-29T18:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.014901 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.014997 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.014911 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:56:59 crc kubenswrapper[4792]: E0929 18:56:59.015036 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:56:59 crc kubenswrapper[4792]: E0929 18:56:59.015141 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:56:59 crc kubenswrapper[4792]: E0929 18:56:59.015257 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.032723 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.037324 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.037357 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.037368 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.037427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.037441 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:59Z","lastTransitionTime":"2025-09-29T18:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.045220 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.060577 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.081889 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.097900 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.120123 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-
release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\
",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.139575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.139616 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.139628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.139645 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.139658 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:59Z","lastTransitionTime":"2025-09-29T18:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.146418 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"l
astState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.167262 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.184075 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.204896 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.214722 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08"} Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.219660 4792 generic.go:334] "Generic (PLEG): container finished" podID="67c58ee5-e056-4e3e-91ed-a116350f2408" containerID="79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf" exitCode=0 Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.219694 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" event={"ID":"67c58ee5-e056-4e3e-91ed-a116350f2408","Type":"ContainerDied","Data":"79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf"} Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.221802 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.237731 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.241507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.241532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.241541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.241556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.241566 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:59Z","lastTransitionTime":"2025-09-29T18:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.251490 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.261543 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.288137 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d
0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.302223 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/r
un/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.315399 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.328348 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.338065 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.343468 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.343506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.343518 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.343534 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.343545 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:59Z","lastTransitionTime":"2025-09-29T18:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.346674 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.356489 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.367911 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.380557 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.393871 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.406761 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.417873 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de259712
6bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.427741 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.437522 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:56:59Z is after 2025-08-24T17:21:41Z" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.446040 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.446075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.446084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.446099 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.446109 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:59Z","lastTransitionTime":"2025-09-29T18:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.548294 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.548319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.548327 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.548338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.548354 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:59Z","lastTransitionTime":"2025-09-29T18:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.651574 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.651629 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.651646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.651672 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.651694 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:59Z","lastTransitionTime":"2025-09-29T18:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.753866 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.753911 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.753926 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.753947 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.753959 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:59Z","lastTransitionTime":"2025-09-29T18:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.857140 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.857378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.857484 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.857598 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.857658 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:59Z","lastTransitionTime":"2025-09-29T18:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.960108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.960172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.960191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.960221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:56:59 crc kubenswrapper[4792]: I0929 18:56:59.960241 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:56:59Z","lastTransitionTime":"2025-09-29T18:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.063355 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.063963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.064096 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.064189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.064267 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:00Z","lastTransitionTime":"2025-09-29T18:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.174473 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.174546 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.174558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.174581 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.174594 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:00Z","lastTransitionTime":"2025-09-29T18:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.227775 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" event={"ID":"67c58ee5-e056-4e3e-91ed-a116350f2408","Type":"ContainerStarted","Data":"aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976"} Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.253761 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{
\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.279808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.279906 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.279924 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.279954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.279974 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:00Z","lastTransitionTime":"2025-09-29T18:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.287035 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.305171 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.322843 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.343228 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.367972 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.383481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.383537 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.383555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.383584 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.383604 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:00Z","lastTransitionTime":"2025-09-29T18:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.388802 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.438285 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84
eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 
18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.453426 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.467229 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.482056 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.486099 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.486151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.486202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.486239 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.486254 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:00Z","lastTransitionTime":"2025-09-29T18:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.496638 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.509046 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.518481 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:00Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.589948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.590012 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.590027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.590072 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.590084 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:00Z","lastTransitionTime":"2025-09-29T18:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.693236 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.693881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.693951 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.694027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.694089 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:00Z","lastTransitionTime":"2025-09-29T18:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.797293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.797376 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.797398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.797429 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.797451 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:00Z","lastTransitionTime":"2025-09-29T18:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.901308 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.901561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.901670 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.901765 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:00 crc kubenswrapper[4792]: I0929 18:57:00.901872 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:00Z","lastTransitionTime":"2025-09-29T18:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.007537 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.007603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.007627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.007664 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.007688 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:01Z","lastTransitionTime":"2025-09-29T18:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.015183 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.015254 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.015364 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:01 crc kubenswrapper[4792]: E0929 18:57:01.015580 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:01 crc kubenswrapper[4792]: E0929 18:57:01.015754 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:01 crc kubenswrapper[4792]: E0929 18:57:01.015982 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.111744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.111792 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.111805 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.111827 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.111840 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:01Z","lastTransitionTime":"2025-09-29T18:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.214589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.214645 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.214667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.214694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.214712 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:01Z","lastTransitionTime":"2025-09-29T18:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.234165 4792 generic.go:334] "Generic (PLEG): container finished" podID="67c58ee5-e056-4e3e-91ed-a116350f2408" containerID="aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976" exitCode=0 Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.234232 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" event={"ID":"67c58ee5-e056-4e3e-91ed-a116350f2408","Type":"ContainerDied","Data":"aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.239659 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.239996 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.250999 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.279925 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.284996 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.303543 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k
8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.315492 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.317116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.317161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.317176 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.317199 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.317213 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:01Z","lastTransitionTime":"2025-09-29T18:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.329623 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.341674 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.350582 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.362659 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.376084 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.387035 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.396966 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.410667 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.426844 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.427137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.427229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.427461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.427638 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:01Z","lastTransitionTime":"2025-09-29T18:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.440682 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.483699 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.501552 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.515683 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.527566 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.531269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.531313 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.531325 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.531342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.531355 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:01Z","lastTransitionTime":"2025-09-29T18:57:01Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.545810 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.560891 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"n
ame\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.576050 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e
598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.592079 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.606731 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 
2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.625110 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\
"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{
\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.634385 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.634422 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.634436 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.634455 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.634468 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:01Z","lastTransitionTime":"2025-09-29T18:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.638895 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\
"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.658860 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.671460 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.684766 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.694721 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:01Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.736269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.736300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.736308 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.736320 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.736329 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:01Z","lastTransitionTime":"2025-09-29T18:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.838984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.839019 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.839030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.839046 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.839058 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:01Z","lastTransitionTime":"2025-09-29T18:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.942008 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.942051 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.942067 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.942089 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:01 crc kubenswrapper[4792]: I0929 18:57:01.942108 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:01Z","lastTransitionTime":"2025-09-29T18:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.044592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.044630 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.044641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.044656 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.044667 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:02Z","lastTransitionTime":"2025-09-29T18:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.147336 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.147381 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.147393 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.147410 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.147420 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:02Z","lastTransitionTime":"2025-09-29T18:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.250429 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.250487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.250506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.250532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.250553 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:02Z","lastTransitionTime":"2025-09-29T18:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.253746 4792 generic.go:334] "Generic (PLEG): container finished" podID="67c58ee5-e056-4e3e-91ed-a116350f2408" containerID="0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed" exitCode=0 Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.253796 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" event={"ID":"67c58ee5-e056-4e3e-91ed-a116350f2408","Type":"ContainerDied","Data":"0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.253988 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.254440 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.274269 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.301405 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.302615 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.321149 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"
}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.342962 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\"
:\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\
\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.355600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.355648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.355663 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.355685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.355699 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:02Z","lastTransitionTime":"2025-09-29T18:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.360383 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z"
Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.377490 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z"
Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.394442 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z"
Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.418968 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.433832 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.448183 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.462549 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.463801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.463838 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.463869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.463886 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.463897 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:02Z","lastTransitionTime":"2025-09-29T18:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.476948 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.493283 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.504159 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.519183 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.530232 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.543319 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.557478 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.566892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.566932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.566943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.566958 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.566970 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:02Z","lastTransitionTime":"2025-09-29T18:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.568594 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.581256 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.595215 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.607576 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.620339 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.633578 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.667176 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"k
ube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.669782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.669821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.669829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 
18:57:02.669867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.669878 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:02Z","lastTransitionTime":"2025-09-29T18:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.695024 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-con
fig\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.711842 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.726188 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:02Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.772541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.772606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.772619 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.772655 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.772667 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:02Z","lastTransitionTime":"2025-09-29T18:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.874573 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.874607 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.874624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.874640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.874651 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:02Z","lastTransitionTime":"2025-09-29T18:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.977159 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.977190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.977201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.977215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:02 crc kubenswrapper[4792]: I0929 18:57:02.977225 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:02Z","lastTransitionTime":"2025-09-29T18:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.014995 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:03 crc kubenswrapper[4792]: E0929 18:57:03.015156 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.015527 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:03 crc kubenswrapper[4792]: E0929 18:57:03.015603 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.015654 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:03 crc kubenswrapper[4792]: E0929 18:57:03.015710 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.080954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.082085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.082103 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.082127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.082144 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:03Z","lastTransitionTime":"2025-09-29T18:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.185448 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.185502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.185519 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.185541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.185555 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:03Z","lastTransitionTime":"2025-09-29T18:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.260377 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.261082 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" event={"ID":"67c58ee5-e056-4e3e-91ed-a116350f2408","Type":"ContainerStarted","Data":"17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e"} Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.272985 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.281737 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.289662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.289687 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.289695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.289709 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.289718 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:03Z","lastTransitionTime":"2025-09-29T18:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.294070 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.305568 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.321179 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.342198 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.357259 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.372702 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.389487 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.393359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.393383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.393391 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.393406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.393414 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:03Z","lastTransitionTime":"2025-09-29T18:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.407799 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb65
8d60896c45798ecb564cb563\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.423508 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.441837 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.455800 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.466618 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:03Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.495136 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.495168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.495179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.495193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.495204 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:03Z","lastTransitionTime":"2025-09-29T18:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.597385 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.597431 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.597439 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.597453 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.597465 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:03Z","lastTransitionTime":"2025-09-29T18:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.699830 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.699898 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.699912 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.699934 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.699947 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:03Z","lastTransitionTime":"2025-09-29T18:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.802154 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.802200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.802211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.802227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.802239 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:03Z","lastTransitionTime":"2025-09-29T18:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.903778 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.903814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.903824 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.903839 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:03 crc kubenswrapper[4792]: I0929 18:57:03.903879 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:03Z","lastTransitionTime":"2025-09-29T18:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.006486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.006522 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.006532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.006547 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.006558 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:04Z","lastTransitionTime":"2025-09-29T18:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.109119 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.109153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.109161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.109175 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.109183 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:04Z","lastTransitionTime":"2025-09-29T18:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.211162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.211206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.211214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.211227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.211235 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:04Z","lastTransitionTime":"2025-09-29T18:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.262441 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.313612 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.313648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.313659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.313676 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.313688 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:04Z","lastTransitionTime":"2025-09-29T18:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.415411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.415449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.415462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.415477 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.415487 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:04Z","lastTransitionTime":"2025-09-29T18:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.517920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.517961 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.517972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.517989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.518003 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:04Z","lastTransitionTime":"2025-09-29T18:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.619832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.619884 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.619896 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.619912 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.619922 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:04Z","lastTransitionTime":"2025-09-29T18:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.689755 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.689873 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:57:20.68983528 +0000 UTC m=+52.683142676 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.689951 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.690041 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.690078 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:20.690071906 +0000 UTC m=+52.683379302 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.722183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.722215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.722225 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.722240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.722251 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:04Z","lastTransitionTime":"2025-09-29T18:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.791192 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.791283 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.791377 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.791438 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.791467 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.791485 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.791511 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.791513 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.791531 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.791543 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.791548 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-09-29 18:57:20.791527973 +0000 UTC m=+52.784835379 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.791574 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:20.791565614 +0000 UTC m=+52.784873020 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:57:04 crc kubenswrapper[4792]: E0929 18:57:04.791603 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:20.791580544 +0000 UTC m=+52.784888000 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.824142 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.824174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.824185 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.824200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.824210 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:04Z","lastTransitionTime":"2025-09-29T18:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.926513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.926544 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.926552 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.926563 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:04 crc kubenswrapper[4792]: I0929 18:57:04.926571 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:04Z","lastTransitionTime":"2025-09-29T18:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.015198 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.015198 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.015217 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:05 crc kubenswrapper[4792]: E0929 18:57:05.015681 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:05 crc kubenswrapper[4792]: E0929 18:57:05.015730 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:05 crc kubenswrapper[4792]: E0929 18:57:05.015745 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.028190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.028224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.028236 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.028251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.028262 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.129909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.130177 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.130186 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.130198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.130206 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.182833 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.182905 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.182919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.182932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.182942 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: E0929 18:57:05.196635 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.199928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.200039 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.200110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.200181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.200243 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: E0929 18:57:05.216259 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.219901 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.219960 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.219979 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.220001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.220020 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: E0929 18:57:05.236742 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.240314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.240465 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.240573 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.240682 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.240827 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: E0929 18:57:05.254036 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.259178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.259395 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.260029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.260278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.260522 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.266628 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/0.log" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.269760 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563" exitCode=1 Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.269777 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.270724 4792 scope.go:117] "RemoveContainer" containerID="a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563" Sep 29 18:57:05 crc kubenswrapper[4792]: E0929 18:57:05.280606 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1
688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":4977
42284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: E0929 18:57:05.280773 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.282442 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.282467 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.282480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.282502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.282514 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.291383 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.308990 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.319655 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.336408 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:04Z\\\",\\\"message\\\":\\\".go:208] Removed *v1.Namespace event handler 5\\\\nI0929 18:57:04.475247 5977 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 18:57:04.475254 5977 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 18:57:04.475261 5977 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 18:57:04.475268 5977 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 18:57:04.475276 5977 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 18:57:04.475478 5977 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 18:57:04.475614 5977 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476210 5977 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476299 5977 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476380 5977 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476931 5977 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.347530 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni
/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.358668 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.369055 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.380739 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.384518 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.384561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.384578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.384598 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.384612 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.395898 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.405053 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.415715 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.425015 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.434406 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.446036 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:05Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.486733 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.486768 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.486776 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.486790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.486799 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.589765 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.589814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.589829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.589864 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.589877 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.692300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.692326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.692334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.692346 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.692353 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.794688 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.794735 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.794753 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.794774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.794792 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.897131 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.897157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.897165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.897178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:05 crc kubenswrapper[4792]: I0929 18:57:05.897186 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:05Z","lastTransitionTime":"2025-09-29T18:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:05.999963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:05.999996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.000004 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.000017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.000025 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:06Z","lastTransitionTime":"2025-09-29T18:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.102823 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.102905 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.102917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.102934 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.102969 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:06Z","lastTransitionTime":"2025-09-29T18:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.205254 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.205297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.205313 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.205334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.205355 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:06Z","lastTransitionTime":"2025-09-29T18:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.284793 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/0.log" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.307488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.307551 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.307570 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.307596 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.307612 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:06Z","lastTransitionTime":"2025-09-29T18:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.362504 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5"] Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.363396 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: W0929 18:57:06.365113 4792 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert": failed to list *v1.Secret: secrets "ovn-control-plane-metrics-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Sep 29 18:57:06 crc kubenswrapper[4792]: E0929 18:57:06.365174 4792 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-control-plane-metrics-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-control-plane-metrics-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 18:57:06 crc kubenswrapper[4792]: W0929 18:57:06.365723 4792 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd": failed to list *v1.Secret: secrets "ovn-kubernetes-control-plane-dockercfg-gs7dd" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Sep 29 18:57:06 crc kubenswrapper[4792]: E0929 18:57:06.365786 4792 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-kubernetes-control-plane-dockercfg-gs7dd\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-kubernetes-control-plane-dockercfg-gs7dd\" is forbidden: User \"system:node:crc\" cannot list 
resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.385464 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"
volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.405049 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.406704 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1762a3e4-6068-48d9-9b1d-bd5b893803bb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.406777 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1762a3e4-6068-48d9-9b1d-bd5b893803bb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.406802 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnnkn\" (UniqueName: \"kubernetes.io/projected/1762a3e4-6068-48d9-9b1d-bd5b893803bb-kube-api-access-xnnkn\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.406827 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1762a3e4-6068-48d9-9b1d-bd5b893803bb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.409259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.409294 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.409306 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.409321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.409333 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:06Z","lastTransitionTime":"2025-09-29T18:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.417136 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.433259 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.448124 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.462714 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.475684 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.507253 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1762a3e4-6068-48d9-9b1d-bd5b893803bb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 
18:57:06.507291 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnnkn\" (UniqueName: \"kubernetes.io/projected/1762a3e4-6068-48d9-9b1d-bd5b893803bb-kube-api-access-xnnkn\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.507310 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1762a3e4-6068-48d9-9b1d-bd5b893803bb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.507325 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1762a3e4-6068-48d9-9b1d-bd5b893803bb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.507878 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1762a3e4-6068-48d9-9b1d-bd5b893803bb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.508483 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1762a3e4-6068-48d9-9b1d-bd5b893803bb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.511929 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb65
8d60896c45798ecb564cb563\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:04Z\\\",\\\"message\\\":\\\".go:208] Removed *v1.Namespace event handler 5\\\\nI0929 18:57:04.475247 5977 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 18:57:04.475254 5977 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 18:57:04.475261 5977 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 18:57:04.475268 5977 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 18:57:04.475276 5977 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 18:57:04.475478 5977 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 18:57:04.475614 5977 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476210 5977 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476299 5977 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476380 5977 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476931 5977 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.513026 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.513053 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.513061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.513075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.513084 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:06Z","lastTransitionTime":"2025-09-29T18:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.525398 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnnkn\" (UniqueName: \"kubernetes.io/projected/1762a3e4-6068-48d9-9b1d-bd5b893803bb-kube-api-access-xnnkn\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.531042 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-mult
us-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.545416 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.560733 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.577566 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.597143 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.612137 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.616027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.616058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.616070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.616086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.616098 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:06Z","lastTransitionTime":"2025-09-29T18:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.624666 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:06Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.717562 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.717609 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.717621 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.717639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.717652 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:06Z","lastTransitionTime":"2025-09-29T18:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.819937 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.819975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.819983 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.819997 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.820006 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:06Z","lastTransitionTime":"2025-09-29T18:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.922117 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.922159 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.922168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.922184 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:06 crc kubenswrapper[4792]: I0929 18:57:06.922193 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:06Z","lastTransitionTime":"2025-09-29T18:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.014649 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.014696 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.014654 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:07 crc kubenswrapper[4792]: E0929 18:57:07.014814 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:07 crc kubenswrapper[4792]: E0929 18:57:07.014889 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:07 crc kubenswrapper[4792]: E0929 18:57:07.014945 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.015403 4792 scope.go:117] "RemoveContainer" containerID="0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.024395 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.024428 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.024437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.024451 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.024466 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:07Z","lastTransitionTime":"2025-09-29T18:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.127386 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.127703 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.127711 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.127727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.127738 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:07Z","lastTransitionTime":"2025-09-29T18:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.229317 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.229486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.229581 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.229668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.229754 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:07Z","lastTransitionTime":"2025-09-29T18:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.296922 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/1.log" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.297491 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/0.log" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.299892 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d" exitCode=1 Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.299931 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.300106 4792 scope.go:117] "RemoveContainer" containerID="a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.300745 4792 scope.go:117] "RemoveContainer" containerID="e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d" Sep 29 18:57:07 crc kubenswrapper[4792]: E0929 18:57:07.300912 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.302079 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.303876 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.304093 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.317258 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.329901 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.331973 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.331999 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.332010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.332028 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.332041 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:07Z","lastTransitionTime":"2025-09-29T18:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.342799 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.358154 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7
a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.368521 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.381951 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.394760 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.417629 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:04Z\\\",\\\"message\\\":\\\".go:208] Removed *v1.Namespace event handler 5\\\\nI0929 18:57:04.475247 5977 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 18:57:04.475254 5977 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 18:57:04.475261 5977 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 18:57:04.475268 5977 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 18:57:04.475276 5977 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 18:57:04.475478 5977 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 18:57:04.475614 5977 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476210 5977 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476299 5977 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476380 5977 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476931 5977 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"DName:}]\\\\nI0929 18:57:07.047441 6143 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service 
k8s.ovn.org/owner:openshift-marketplace/redhat-marketplace]} name:Service_openshift-marketplace/redhat-marketplace_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.140:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97b6e7b0-06ca-455e-8259-06895040cb0c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 18:57:07.047477 6143 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 18:57:07.047535 6143 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: handler {0x1fe5060 0x1fe4d40 0x1fe4ce0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: cer\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.431083 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.434653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.434681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.434691 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.434707 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.434719 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:07Z","lastTransitionTime":"2025-09-29T18:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.447363 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.455285 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-v5b2m"] Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.455971 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:07 crc kubenswrapper[4792]: E0929 18:57:07.456144 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.462036 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.483457 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.498577 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: E0929 18:57:07.508383 4792 secret.go:188] Couldn't get secret openshift-ovn-kubernetes/ovn-control-plane-metrics-cert: failed to sync secret cache: timed out waiting for the condition Sep 29 18:57:07 crc kubenswrapper[4792]: E0929 18:57:07.508542 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1762a3e4-6068-48d9-9b1d-bd5b893803bb-ovn-control-plane-metrics-cert podName:1762a3e4-6068-48d9-9b1d-bd5b893803bb nodeName:}" failed. No retries permitted until 2025-09-29 18:57:08.00851282 +0000 UTC m=+40.001820226 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovn-control-plane-metrics-cert" (UniqueName: "kubernetes.io/secret/1762a3e4-6068-48d9-9b1d-bd5b893803bb-ovn-control-plane-metrics-cert") pod "ovnkube-control-plane-749d76644c-rr4g5" (UID: "1762a3e4-6068-48d9-9b1d-bd5b893803bb") : failed to sync secret cache: timed out waiting for the condition Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.516868 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.516906 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8ps7\" (UniqueName: \"kubernetes.io/projected/fd292349-0e5a-4d80-b163-193aa43c98db-kube-api-access-d8ps7\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.536701 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.536731 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.536739 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.536750 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.536759 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:07Z","lastTransitionTime":"2025-09-29T18:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.559962 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.579586 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.592598 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.603140 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.605680 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.615478 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.617540 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.617585 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8ps7\" (UniqueName: \"kubernetes.io/projected/fd292349-0e5a-4d80-b163-193aa43c98db-kube-api-access-d8ps7\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:07 crc kubenswrapper[4792]: E0929 18:57:07.617694 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:07 crc kubenswrapper[4792]: E0929 18:57:07.617768 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs podName:fd292349-0e5a-4d80-b163-193aa43c98db nodeName:}" failed. No retries permitted until 2025-09-29 18:57:08.117751371 +0000 UTC m=+40.111058767 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs") pod "network-metrics-daemon-v5b2m" (UID: "fd292349-0e5a-4d80-b163-193aa43c98db") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.627364 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.636437 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-d8ps7\" (UniqueName: \"kubernetes.io/projected/fd292349-0e5a-4d80-b163-193aa43c98db-kube-api-access-d8ps7\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.638303 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.638354 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.638363 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.638376 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.638406 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:07Z","lastTransitionTime":"2025-09-29T18:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.641156 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192
.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\
",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.652173 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-sock
et-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.663755 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.674160 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.684359 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.699157 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:04Z\\\",\\\"message\\\":\\\".go:208] Removed *v1.Namespace event handler 5\\\\nI0929 18:57:04.475247 5977 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 18:57:04.475254 5977 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 18:57:04.475261 5977 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 18:57:04.475268 5977 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 18:57:04.475276 5977 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 18:57:04.475478 5977 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 18:57:04.475614 5977 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476210 5977 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476299 5977 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476380 5977 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476931 5977 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"DName:}]\\\\nI0929 18:57:07.047441 6143 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service 
k8s.ovn.org/owner:openshift-marketplace/redhat-marketplace]} name:Service_openshift-marketplace/redhat-marketplace_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.140:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97b6e7b0-06ca-455e-8259-06895040cb0c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 18:57:07.047477 6143 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 18:57:07.047535 6143 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: handler {0x1fe5060 0x1fe4d40 0x1fe4ce0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: cer\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.720222 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.731586 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.740865 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.740892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.740900 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.740913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.740921 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:07Z","lastTransitionTime":"2025-09-29T18:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.743349 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.752409 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.761925 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.769737 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:07Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.843460 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.843498 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.843509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.843527 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.843539 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:07Z","lastTransitionTime":"2025-09-29T18:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.936032 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.945895 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.945941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.945954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.945976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:07 crc kubenswrapper[4792]: I0929 18:57:07.946006 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:07Z","lastTransitionTime":"2025-09-29T18:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.021304 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1762a3e4-6068-48d9-9b1d-bd5b893803bb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.024818 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1762a3e4-6068-48d9-9b1d-bd5b893803bb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rr4g5\" (UID: \"1762a3e4-6068-48d9-9b1d-bd5b893803bb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.047761 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.047796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.047807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.047821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.047832 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:08Z","lastTransitionTime":"2025-09-29T18:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.121963 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:08 crc kubenswrapper[4792]: E0929 18:57:08.122200 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:08 crc kubenswrapper[4792]: E0929 18:57:08.122302 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs podName:fd292349-0e5a-4d80-b163-193aa43c98db nodeName:}" failed. No retries permitted until 2025-09-29 18:57:09.122280254 +0000 UTC m=+41.115587750 (durationBeforeRetry 1s). 
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.150536 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.150784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.150869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.150941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.151006 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:08Z","lastTransitionTime":"2025-09-29T18:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.193075 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5"
Sep 29 18:57:08 crc kubenswrapper[4792]: W0929 18:57:08.213580 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1762a3e4_6068_48d9_9b1d_bd5b893803bb.slice/crio-fd671def0ec8548deb4bf2f68e67f84d87db499a3ea5cc47b3ad989c807b642f WatchSource:0}: Error finding container fd671def0ec8548deb4bf2f68e67f84d87db499a3ea5cc47b3ad989c807b642f: Status 404 returned error can't find the container with id fd671def0ec8548deb4bf2f68e67f84d87db499a3ea5cc47b3ad989c807b642f
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.254502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.254578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.254594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.254611 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.254622 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:08Z","lastTransitionTime":"2025-09-29T18:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
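The Ready=False condition keeps repeating because the kubelet's network plugin finds no CNI configuration under /etc/kubernetes/cni/net.d/ until ovn-kubernetes writes one. An approximate Go sketch of that readiness probe, assuming the usual libcni file extensions (.conf, .conflist, .json):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// The NotReady condition above clears only once a CNI config file shows up
// in the kubelet's CNI conf directory. This is a rough approximation of
// that check, not the exact libcni/kubelet implementation.
func cniConfPresent(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := cniConfPresent("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if !ok {
		fmt.Println("no CNI configuration file found; network plugin not ready")
	}
}
```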
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.308495 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/1.log"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.311489 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" event={"ID":"1762a3e4-6068-48d9-9b1d-bd5b893803bb","Type":"ContainerStarted","Data":"fd671def0ec8548deb4bf2f68e67f84d87db499a3ea5cc47b3ad989c807b642f"}
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.355984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.356018 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.356028 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.356043 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.356054 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:08Z","lastTransitionTime":"2025-09-29T18:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.458528 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.458565 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.458576 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.458593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.458606 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:08Z","lastTransitionTime":"2025-09-29T18:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.560957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.561001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.561011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.561023 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.561032 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:08Z","lastTransitionTime":"2025-09-29T18:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.663592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.663635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.663643 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.663657 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.663665 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:08Z","lastTransitionTime":"2025-09-29T18:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.766258 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.766480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.766489 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.766504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.766513 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:08Z","lastTransitionTime":"2025-09-29T18:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.868244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.868288 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.868296 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.868311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.868320 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:08Z","lastTransitionTime":"2025-09-29T18:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.970797 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.970841 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.970869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.970887 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:08 crc kubenswrapper[4792]: I0929 18:57:08.970898 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:08Z","lastTransitionTime":"2025-09-29T18:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.014316 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.014448 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:57:09 crc kubenswrapper[4792]: E0929 18:57:09.014667 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.014930 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
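The entries above show the gate that produces "Error syncing pod, skipping": a pod that needs the cluster network is refused a new sandbox while NetworkReady=false, whereas ovnkube-control-plane was given a sandbox at 18:57:08 despite the outage, consistent with it using host networking. A simplified Go sketch of that gate, an illustration rather than kubelet's actual sync-loop code:

```go
package main

import (
	"errors"
	"fmt"
)

// Before starting a new sandbox, a pod that is not host-networked is
// rejected while the runtime network is not ready; the sync is skipped
// and retried on a later loop iteration.
type pod struct {
	name        string
	hostNetwork bool
}

var errNetworkNotReady = errors.New("network is not ready: container runtime network not ready: NetworkReady=false")

func ensureSandbox(p pod, networkReady bool) error {
	if !p.hostNetwork && !networkReady {
		return errNetworkNotReady
	}
	fmt.Printf("creating a new sandbox for pod %q\n", p.name)
	return nil
}

func main() {
	pods := []pod{
		{name: "openshift-multus/network-metrics-daemon-v5b2m"},
		// hostNetwork here is an assumption inferred from this log's ordering.
		{name: "openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5", hostNetwork: true},
	}
	for _, p := range pods {
		if err := ensureSandbox(p, false); err != nil {
			fmt.Printf("Error syncing pod, skipping: %v pod=%q\n", err, p.name)
		}
	}
}
```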
Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.014985 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:57:09 crc kubenswrapper[4792]: E0929 18:57:09.015100 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:57:09 crc kubenswrapper[4792]: E0929 18:57:09.015175 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:57:09 crc kubenswrapper[4792]: E0929 18:57:09.015233 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.038841 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.052017 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.069061 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.072810 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.072890 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.072916 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.072943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.072965 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:09Z","lastTransitionTime":"2025-09-29T18:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.089145 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.108026 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.126356 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.131809 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:09 crc kubenswrapper[4792]: E0929 18:57:09.132013 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:09 crc kubenswrapper[4792]: E0929 18:57:09.132095 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs podName:fd292349-0e5a-4d80-b163-193aa43c98db nodeName:}" failed. No retries permitted until 2025-09-29 18:57:11.13207336 +0000 UTC m=+43.125380766 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs") pod "network-metrics-daemon-v5b2m" (UID: "fd292349-0e5a-4d80-b163-193aa43c98db") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.146679 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.169406 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c1
7950b38a05cb3ae1f56e205d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:04Z\\\",\\\"message\\\":\\\".go:208] Removed *v1.Namespace event handler 5\\\\nI0929 18:57:04.475247 5977 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 18:57:04.475254 5977 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 18:57:04.475261 5977 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 18:57:04.475268 5977 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 18:57:04.475276 5977 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 18:57:04.475478 5977 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 18:57:04.475614 5977 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476210 5977 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476299 5977 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476380 5977 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476931 5977 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"DName:}]\\\\nI0929 18:57:07.047441 6143 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-marketplace]} name:Service_openshift-marketplace/redhat-marketplace_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.140:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97b6e7b0-06ca-455e-8259-06895040cb0c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 18:57:07.047477 6143 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 18:57:07.047535 6143 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: handler {0x1fe5060 0x1fe4d40 0x1fe4ce0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc 
annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: cer\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.175782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.175823 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.175835 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.175890 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.175902 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:09Z","lastTransitionTime":"2025-09-29T18:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.185038 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.207128 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84
eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 
18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.222710 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.236493 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.249203 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.260494 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.275189 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.278216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.278266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.278279 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.278298 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.278317 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:09Z","lastTransitionTime":"2025-09-29T18:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.291006 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.316030 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" event={"ID":"1762a3e4-6068-48d9-9b1d-bd5b893803bb","Type":"ContainerStarted","Data":"8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc"} Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.316081 4792 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" event={"ID":"1762a3e4-6068-48d9-9b1d-bd5b893803bb","Type":"ContainerStarted","Data":"7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe"} Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.330951 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.345664 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:3
1Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" 
certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.356139 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.368472 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.381011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.381244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.381318 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.381382 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.381459 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:09Z","lastTransitionTime":"2025-09-29T18:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.386241 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c1
7950b38a05cb3ae1f56e205d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:04Z\\\",\\\"message\\\":\\\".go:208] Removed *v1.Namespace event handler 5\\\\nI0929 18:57:04.475247 5977 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 18:57:04.475254 5977 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 18:57:04.475261 5977 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 18:57:04.475268 5977 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 18:57:04.475276 5977 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 18:57:04.475478 5977 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 18:57:04.475614 5977 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476210 5977 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476299 5977 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476380 5977 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476931 5977 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"DName:}]\\\\nI0929 18:57:07.047441 6143 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-marketplace]} name:Service_openshift-marketplace/redhat-marketplace_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.140:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97b6e7b0-06ca-455e-8259-06895040cb0c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 18:57:07.047477 6143 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 18:57:07.047535 6143 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: handler {0x1fe5060 0x1fe4d40 0x1fe4ce0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc 
annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: cer\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.398018 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 
18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.411471 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.420952 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.430445 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.443109 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.454303 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.464242 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc 
kubenswrapper[4792]: I0929 18:57:09.476486 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.483432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.483480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.483495 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.483516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.483531 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:09Z","lastTransitionTime":"2025-09-29T18:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.488470 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.498476 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.510591 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:09Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.587489 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.587536 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.587555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.587580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.587598 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:09Z","lastTransitionTime":"2025-09-29T18:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.690761 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.690910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.690930 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.690957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.690975 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:09Z","lastTransitionTime":"2025-09-29T18:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.794214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.794284 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.794307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.794334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.794415 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:09Z","lastTransitionTime":"2025-09-29T18:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.898058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.898119 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.898141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.898170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:09 crc kubenswrapper[4792]: I0929 18:57:09.898191 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:09Z","lastTransitionTime":"2025-09-29T18:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.001369 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.001427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.001443 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.001466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.001483 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:10Z","lastTransitionTime":"2025-09-29T18:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.103767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.103798 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.103809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.103824 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.103834 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:10Z","lastTransitionTime":"2025-09-29T18:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.206811 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.206910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.206928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.206957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.206974 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:10Z","lastTransitionTime":"2025-09-29T18:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.308784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.308823 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.308831 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.308844 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.308866 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:10Z","lastTransitionTime":"2025-09-29T18:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.411180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.411211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.411219 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.411232 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.411241 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:10Z","lastTransitionTime":"2025-09-29T18:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.513078 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.513109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.513117 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.513130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.513139 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:10Z","lastTransitionTime":"2025-09-29T18:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.615372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.615417 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.615427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.615445 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.615456 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:10Z","lastTransitionTime":"2025-09-29T18:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.718418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.718480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.718498 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.718524 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.718542 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:10Z","lastTransitionTime":"2025-09-29T18:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.820383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.820637 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.820697 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.820768 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.820885 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:10Z","lastTransitionTime":"2025-09-29T18:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.923542 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.923569 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.923580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.923595 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:10 crc kubenswrapper[4792]: I0929 18:57:10.923604 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:10Z","lastTransitionTime":"2025-09-29T18:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.015143 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:11 crc kubenswrapper[4792]: E0929 18:57:11.015470 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.015228 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:11 crc kubenswrapper[4792]: E0929 18:57:11.015718 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.015149 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:11 crc kubenswrapper[4792]: E0929 18:57:11.015951 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.015300 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:11 crc kubenswrapper[4792]: E0929 18:57:11.016145 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.025759 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.025818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.025876 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.025909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.025932 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:11Z","lastTransitionTime":"2025-09-29T18:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.128447 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.128490 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.128502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.128521 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.128533 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:11Z","lastTransitionTime":"2025-09-29T18:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.148955 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:11 crc kubenswrapper[4792]: E0929 18:57:11.149076 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:11 crc kubenswrapper[4792]: E0929 18:57:11.149365 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs podName:fd292349-0e5a-4d80-b163-193aa43c98db nodeName:}" failed. No retries permitted until 2025-09-29 18:57:15.149343792 +0000 UTC m=+47.142651198 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs") pod "network-metrics-daemon-v5b2m" (UID: "fd292349-0e5a-4d80-b163-193aa43c98db") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.231184 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.231264 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.231287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.231314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.231336 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:11Z","lastTransitionTime":"2025-09-29T18:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.333403 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.333671 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.333752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.333815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.333903 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:11Z","lastTransitionTime":"2025-09-29T18:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.436721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.436782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.436802 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.436828 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.436846 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:11Z","lastTransitionTime":"2025-09-29T18:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.539432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.539493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.539511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.539535 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.539553 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:11Z","lastTransitionTime":"2025-09-29T18:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.641972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.642368 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.642397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.642420 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.642437 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:11Z","lastTransitionTime":"2025-09-29T18:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.745226 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.745301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.745314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.745352 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.745366 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:11Z","lastTransitionTime":"2025-09-29T18:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.848644 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.848698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.848716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.848739 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.848755 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:11Z","lastTransitionTime":"2025-09-29T18:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.951389 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.951462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.951481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.951505 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:11 crc kubenswrapper[4792]: I0929 18:57:11.951522 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:11Z","lastTransitionTime":"2025-09-29T18:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.054167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.054204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.054212 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.054227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.054236 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:12Z","lastTransitionTime":"2025-09-29T18:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.156682 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.157012 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.157189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.157378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.157506 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:12Z","lastTransitionTime":"2025-09-29T18:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.261195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.261279 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.261296 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.261321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.261338 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:12Z","lastTransitionTime":"2025-09-29T18:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.363393 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.363721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.363916 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.364253 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.364455 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:12Z","lastTransitionTime":"2025-09-29T18:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.467751 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.467792 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.467809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.467831 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.467876 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:12Z","lastTransitionTime":"2025-09-29T18:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.570200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.570244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.570258 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.570276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.570288 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:12Z","lastTransitionTime":"2025-09-29T18:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.672617 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.673071 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.673250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.673426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.673559 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:12Z","lastTransitionTime":"2025-09-29T18:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.776371 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.776460 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.776478 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.776504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.776523 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:12Z","lastTransitionTime":"2025-09-29T18:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.879741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.879786 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.879802 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.879827 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.879913 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:12Z","lastTransitionTime":"2025-09-29T18:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.983370 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.983414 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.983427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.983445 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:12 crc kubenswrapper[4792]: I0929 18:57:12.983458 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:12Z","lastTransitionTime":"2025-09-29T18:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.014870 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.015111 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:13 crc kubenswrapper[4792]: E0929 18:57:13.015688 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.015269 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:13 crc kubenswrapper[4792]: E0929 18:57:13.015687 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.015132 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:13 crc kubenswrapper[4792]: E0929 18:57:13.016574 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:13 crc kubenswrapper[4792]: E0929 18:57:13.016745 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.085687 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.085831 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.085905 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.085937 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.085956 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:13Z","lastTransitionTime":"2025-09-29T18:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.188582 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.188972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.189151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.189356 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.189540 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:13Z","lastTransitionTime":"2025-09-29T18:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.292589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.292654 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.293591 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.293646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.293828 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:13Z","lastTransitionTime":"2025-09-29T18:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.397177 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.397237 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.397255 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.397280 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.397298 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:13Z","lastTransitionTime":"2025-09-29T18:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.499945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.499998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.500013 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.500032 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.500049 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:13Z","lastTransitionTime":"2025-09-29T18:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.602244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.602328 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.602348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.602370 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.602387 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:13Z","lastTransitionTime":"2025-09-29T18:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.705468 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.705734 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.705948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.706038 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.706110 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:13Z","lastTransitionTime":"2025-09-29T18:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.809763 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.810257 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.810425 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.810662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.810950 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:13Z","lastTransitionTime":"2025-09-29T18:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.914054 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.914118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.914138 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.914164 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:13 crc kubenswrapper[4792]: I0929 18:57:13.914183 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:13Z","lastTransitionTime":"2025-09-29T18:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.016125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.016156 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.016166 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.016177 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.016203 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:14Z","lastTransitionTime":"2025-09-29T18:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.118752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.119062 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.119133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.119199 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.119290 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:14Z","lastTransitionTime":"2025-09-29T18:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.221502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.221775 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.221862 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.221927 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.221985 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:14Z","lastTransitionTime":"2025-09-29T18:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.324345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.324397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.324408 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.324428 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.324440 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:14Z","lastTransitionTime":"2025-09-29T18:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.427968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.428029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.428042 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.428079 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.428093 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:14Z","lastTransitionTime":"2025-09-29T18:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.530785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.530877 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.530892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.530910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.530921 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:14Z","lastTransitionTime":"2025-09-29T18:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.633454 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.633488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.633497 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.633509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.633517 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:14Z","lastTransitionTime":"2025-09-29T18:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.735944 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.736001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.736026 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.736048 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.736065 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:14Z","lastTransitionTime":"2025-09-29T18:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.838613 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.838663 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.838677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.838696 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.838710 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:14Z","lastTransitionTime":"2025-09-29T18:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.941277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.941564 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.941641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.941712 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:14 crc kubenswrapper[4792]: I0929 18:57:14.941857 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:14Z","lastTransitionTime":"2025-09-29T18:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.015057 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.015056 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.015068 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.015130 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.015295 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.015904 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.015981 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.016025 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.044694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.044940 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.045152 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.045310 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.045452 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.148687 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.149075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.149240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.149394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.149545 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.190260 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.190483 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.190557 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs podName:fd292349-0e5a-4d80-b163-193aa43c98db nodeName:}" failed. No retries permitted until 2025-09-29 18:57:23.190532059 +0000 UTC m=+55.183839495 (durationBeforeRetry 8s). 
[log excerpt condensed: an identical heartbeat event group and "Node became not ready" condition are recorded again at 18:57:15.253]
Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.282943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.283019 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.283041 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.283070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.283087 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.307088 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:15Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.312952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.313249 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.313403 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.313540 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.313657 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.335802 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:15Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.341452 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.341530 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.341553 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.341581 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.341604 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.367130 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:15Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.373626 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.373957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.374112 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.374253 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.374395 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.397377 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:15Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.402692 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.402737 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.402751 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.402768 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.402780 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.417808 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:15Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:15 crc kubenswrapper[4792]: E0929 18:57:15.418045 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.420010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.420057 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.420069 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.420086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.420099 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.523216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.523589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.523952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.524273 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.524500 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.629021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.630003 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.630198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.630404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.630708 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.734666 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.734738 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.734758 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.734785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.734803 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.837830 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.837933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.837952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.837984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.838004 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.941438 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.941513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.941534 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.941562 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:15 crc kubenswrapper[4792]: I0929 18:57:15.941584 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:15Z","lastTransitionTime":"2025-09-29T18:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.045191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.045243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.045272 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.045293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.045306 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:16Z","lastTransitionTime":"2025-09-29T18:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.148668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.148743 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.148765 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.148797 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.148822 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:16Z","lastTransitionTime":"2025-09-29T18:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.253442 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.253509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.253527 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.253558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.253583 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:16Z","lastTransitionTime":"2025-09-29T18:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.356913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.356986 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.356999 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.357017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.357032 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:16Z","lastTransitionTime":"2025-09-29T18:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.459870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.459921 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.459933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.459948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.459960 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:16Z","lastTransitionTime":"2025-09-29T18:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.562952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.562994 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.563003 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.563016 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.563027 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:16Z","lastTransitionTime":"2025-09-29T18:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.665492 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.666258 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.666355 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.666455 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.666544 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:16Z","lastTransitionTime":"2025-09-29T18:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.769010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.769393 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.769599 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.769784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.770036 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:16Z","lastTransitionTime":"2025-09-29T18:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.873020 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.873066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.873077 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.873098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.873109 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:16Z","lastTransitionTime":"2025-09-29T18:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.975918 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.976302 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.976496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.976709 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:16 crc kubenswrapper[4792]: I0929 18:57:16.976931 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:16Z","lastTransitionTime":"2025-09-29T18:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.014548 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.014580 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.014792 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.014885 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:17 crc kubenswrapper[4792]: E0929 18:57:17.014895 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:17 crc kubenswrapper[4792]: E0929 18:57:17.015136 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:17 crc kubenswrapper[4792]: E0929 18:57:17.015291 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:17 crc kubenswrapper[4792]: E0929 18:57:17.015385 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.079895 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.079943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.079955 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.079972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.079984 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:17Z","lastTransitionTime":"2025-09-29T18:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.182517 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.182567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.182580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.182617 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.182633 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:17Z","lastTransitionTime":"2025-09-29T18:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.284681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.284708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.284717 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.284728 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.284736 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:17Z","lastTransitionTime":"2025-09-29T18:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.387208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.387490 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.387564 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.387661 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.387737 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:17Z","lastTransitionTime":"2025-09-29T18:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.490145 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.490196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.490212 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.490233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.490249 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:17Z","lastTransitionTime":"2025-09-29T18:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.593251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.593290 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.593307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.593324 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.593334 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:17Z","lastTransitionTime":"2025-09-29T18:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.696302 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.696337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.696348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.696365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.696376 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:17Z","lastTransitionTime":"2025-09-29T18:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.799635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.799671 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.799679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.799694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.799705 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:17Z","lastTransitionTime":"2025-09-29T18:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.902105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.902163 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.902179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.902202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:17 crc kubenswrapper[4792]: I0929 18:57:17.902218 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:17Z","lastTransitionTime":"2025-09-29T18:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.005086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.005297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.005359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.005461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.005570 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:18Z","lastTransitionTime":"2025-09-29T18:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.108237 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.108297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.108314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.108338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.108355 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:18Z","lastTransitionTime":"2025-09-29T18:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.211191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.211224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.211233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.211246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.211255 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:18Z","lastTransitionTime":"2025-09-29T18:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.313576 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.313614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.313626 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.313642 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.313656 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:18Z","lastTransitionTime":"2025-09-29T18:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.416243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.416628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.416792 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.416998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.417167 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:18Z","lastTransitionTime":"2025-09-29T18:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.519351 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.519391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.519403 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.519420 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.519428 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:18Z","lastTransitionTime":"2025-09-29T18:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.622716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.622779 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.622799 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.622823 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.622840 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:18Z","lastTransitionTime":"2025-09-29T18:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.725882 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.725937 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.725949 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.725967 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.725980 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:18Z","lastTransitionTime":"2025-09-29T18:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.827820 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.827896 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.827911 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.827929 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.827941 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:18Z","lastTransitionTime":"2025-09-29T18:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.930606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.930663 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.930689 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.930718 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:18 crc kubenswrapper[4792]: I0929 18:57:18.930740 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:18Z","lastTransitionTime":"2025-09-29T18:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.014966 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:19 crc kubenswrapper[4792]: E0929 18:57:19.015142 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.015632 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:19 crc kubenswrapper[4792]: E0929 18:57:19.015782 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.016026 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:19 crc kubenswrapper[4792]: E0929 18:57:19.016350 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.016633 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:19 crc kubenswrapper[4792]: E0929 18:57:19.017390 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.033359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.033421 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.033439 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.033461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.033478 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:19Z","lastTransitionTime":"2025-09-29T18:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.039422 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.054172 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.068456 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.091044 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.106473 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.130107 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reaso
n\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.136401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.136785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.137094 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.137284 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.137453 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:19Z","lastTransitionTime":"2025-09-29T18:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.149276 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.173435 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.194412 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.215927 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.242995 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.243043 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.243057 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.243077 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.242946 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secre
ts/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a69c10a011d015e4ba98c0b6bdfe1a4d2644bb658d60896c45798ecb564cb563\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:04Z\\\",\\\"message\\\":\\\".go:208] Removed *v1.Namespace event handler 5\\\\nI0929 18:57:04.475247 5977 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 18:57:04.475254 5977 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 18:57:04.475261 5977 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 18:57:04.475268 5977 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 18:57:04.475276 5977 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 18:57:04.475478 5977 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 18:57:04.475614 5977 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476210 5977 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 18:57:04.476299 5977 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476380 5977 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 18:57:04.476931 5977 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"DName:}]\\\\nI0929 18:57:07.047441 6143 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-marketplace]} name:Service_openshift-marketplace/redhat-marketplace_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.140:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97b6e7b0-06ca-455e-8259-06895040cb0c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 18:57:07.047477 6143 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 18:57:07.047535 6143 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: handler {0x1fe5060 0x1fe4d40 0x1fe4ce0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: 
cer\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.243091 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:19Z","lastTransitionTime":"2025-09-29T18:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.265816 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.287899 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.303493 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.316159 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.328008 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:19Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.346804 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.346828 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.346839 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.346871 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.346883 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:19Z","lastTransitionTime":"2025-09-29T18:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.450069 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.450125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.450143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.450166 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.450185 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:19Z","lastTransitionTime":"2025-09-29T18:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.552744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.552784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.552796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.552814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.552827 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:19Z","lastTransitionTime":"2025-09-29T18:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.655201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.655277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.655300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.655328 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.655350 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:19Z","lastTransitionTime":"2025-09-29T18:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.758496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.758577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.758601 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.758622 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.758634 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:19Z","lastTransitionTime":"2025-09-29T18:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.861274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.861347 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.861373 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.861403 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.861424 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:19Z","lastTransitionTime":"2025-09-29T18:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.964012 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.964062 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.964080 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.964103 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:19 crc kubenswrapper[4792]: I0929 18:57:19.964120 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:19Z","lastTransitionTime":"2025-09-29T18:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.015874 4792 scope.go:117] "RemoveContainer" containerID="e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.043484 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: 
I0929 18:57:20.066726 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be
\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"DName:}]\\\\nI0929 18:57:07.047441 6143 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-marketplace]} name:Service_openshift-marketplace/redhat-marketplace_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.140:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97b6e7b0-06ca-455e-8259-06895040cb0c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 18:57:07.047477 6143 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 18:57:07.047535 6143 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: handler {0x1fe5060 0x1fe4d40 0x1fe4ce0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: cer\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.068027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.068423 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.068572 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.068680 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.068800 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:20Z","lastTransitionTime":"2025-09-29T18:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.083962 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.102728 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84
eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 
18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.124008 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.140176 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.155079 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.171062 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.188495 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.188558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.188578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.188601 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.188616 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:20Z","lastTransitionTime":"2025-09-29T18:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.208141 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.246550 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.263458 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.282502 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.291001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.291044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.291057 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.291076 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.291087 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:20Z","lastTransitionTime":"2025-09-29T18:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.304199 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.315741 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.327124 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.339223 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.351958 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/1.log" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.354495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2"} Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.354615 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.366346 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes
.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.384189 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"DName:}]\\\\nI0929 18:57:07.047441 6143 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-marketplace]} name:Service_openshift-marketplace/redhat-marketplace_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.140:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97b6e7b0-06ca-455e-8259-06895040cb0c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 18:57:07.047477 6143 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 18:57:07.047535 6143 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: handler {0x1fe5060 0x1fe4d40 0x1fe4ce0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc 
annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: cer\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\
\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.392990 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.393035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.393047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.393065 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.393078 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:20Z","lastTransitionTime":"2025-09-29T18:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.399837 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.417441 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84
eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 
18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.436496 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.457823 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.474779 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.492831 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.493408 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],
\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.495029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.495072 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.495084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.495102 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.495114 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:20Z","lastTransitionTime":"2025-09-29T18:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.506809 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.518307 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.532763 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.543058 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.555677 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.565607 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.577182 4792 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.589316 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.597096 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.597137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.597149 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.597165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.597176 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:20Z","lastTransitionTime":"2025-09-29T18:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.699639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.699687 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.699696 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.699711 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.699720 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:20Z","lastTransitionTime":"2025-09-29T18:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.750268 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.750390 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.750461 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:57:52.75042951 +0000 UTC m=+84.743736896 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.750473 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.750540 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:52.750525923 +0000 UTC m=+84.743833309 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.803007 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.803051 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.803065 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.803080 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.803092 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:20Z","lastTransitionTime":"2025-09-29T18:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.851810 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.851896 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.851931 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.852016 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.852108 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:52.852090523 +0000 UTC m=+84.845397919 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.852128 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.852166 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.852181 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.852254 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:52.852232486 +0000 UTC m=+84.845539892 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.852265 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.852351 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.852383 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:57:20 crc kubenswrapper[4792]: E0929 18:57:20.852501 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 18:57:52.852464602 +0000 UTC m=+84.845772028 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.906271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.906308 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.906321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.906338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:20 crc kubenswrapper[4792]: I0929 18:57:20.906350 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:20Z","lastTransitionTime":"2025-09-29T18:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.009132 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.009189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.009199 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.009220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.009236 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:21Z","lastTransitionTime":"2025-09-29T18:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.015109 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.015149 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.015172 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.015149 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:21 crc kubenswrapper[4792]: E0929 18:57:21.015262 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:21 crc kubenswrapper[4792]: E0929 18:57:21.015385 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:21 crc kubenswrapper[4792]: E0929 18:57:21.015817 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:21 crc kubenswrapper[4792]: E0929 18:57:21.015877 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.112196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.112252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.112269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.112293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.112311 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:21Z","lastTransitionTime":"2025-09-29T18:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.215251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.215304 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.215321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.215343 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.215360 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:21Z","lastTransitionTime":"2025-09-29T18:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.318240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.318526 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.318587 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.318662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.318722 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:21Z","lastTransitionTime":"2025-09-29T18:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.360570 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/2.log" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.361713 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/1.log" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.366131 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2" exitCode=1 Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.366272 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2"} Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.366384 4792 scope.go:117] "RemoveContainer" containerID="e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.369960 4792 scope.go:117] "RemoveContainer" containerID="1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2" Sep 29 18:57:21 crc kubenswrapper[4792]: E0929 18:57:21.370375 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.397819 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.416175 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.421456 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.421513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.421527 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.421541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.421575 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:21Z","lastTransitionTime":"2025-09-29T18:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.433306 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.457476 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"DName:}]\\\\nI0929 18:57:07.047441 6143 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-marketplace]} name:Service_openshift-marketplace/redhat-marketplace_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.140:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97b6e7b0-06ca-455e-8259-06895040cb0c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 18:57:07.047477 6143 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 18:57:07.047535 6143 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: handler {0x1fe5060 0x1fe4d40 0x1fe4ce0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: cer\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network 
controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"i
mage\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.472936 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.486279 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.497184 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.507640 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.519901 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 
18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.525129 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.525170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.525183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.525208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.525224 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:21Z","lastTransitionTime":"2025-09-29T18:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.536890 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.552210 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.570798 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.588925 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.605294 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.622301 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.628981 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.629018 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.629031 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.629048 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.629062 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:21Z","lastTransitionTime":"2025-09-29T18:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.638138 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:21Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.732458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.732514 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.732526 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.732549 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.732564 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:21Z","lastTransitionTime":"2025-09-29T18:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.835939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.835997 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.836014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.836041 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.836062 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:21Z","lastTransitionTime":"2025-09-29T18:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.940726 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.940795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.940814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.940841 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:21 crc kubenswrapper[4792]: I0929 18:57:21.940914 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:21Z","lastTransitionTime":"2025-09-29T18:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.044948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.045216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.045240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.045267 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.045291 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:22Z","lastTransitionTime":"2025-09-29T18:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.107424 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.130497 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.132431 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.149357 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.149611 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.149752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.149976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.150123 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:22Z","lastTransitionTime":"2025-09-29T18:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.154745 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.175402 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.196343 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 
18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.218719 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.241672 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.255832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.255935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.255957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.255996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.256019 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:22Z","lastTransitionTime":"2025-09-29T18:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.266431 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.290016 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.313387 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.342473 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.360139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.360195 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.360214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.360249 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.360270 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:22Z","lastTransitionTime":"2025-09-29T18:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.364805 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.376494 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/2.log" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.384676 4792 scope.go:117] "RemoveContainer" containerID="1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2" Sep 29 18:57:22 crc kubenswrapper[4792]: E0929 18:57:22.384947 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.397174 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.433039 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.456430 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.463665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.463728 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.463748 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.463781 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.463800 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:22Z","lastTransitionTime":"2025-09-29T18:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.485908 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2
acfd8e7e248bc6e9faaf66e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e402552ce11d3c59a676cb86052c03bd0f6ef0c17950b38a05cb3ae1f56e205d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"DName:}]\\\\nI0929 18:57:07.047441 6143 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-marketplace]} name:Service_openshift-marketplace/redhat-marketplace_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.140:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97b6e7b0-06ca-455e-8259-06895040cb0c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 18:57:07.047477 6143 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 18:57:07.047535 6143 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: handler {0x1fe5060 0x1fe4d40 0x1fe4ce0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: cer\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 
18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.512502 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.536425 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.557082 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.567157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.567221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.567241 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.567303 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.567323 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:22Z","lastTransitionTime":"2025-09-29T18:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.580019 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.603328 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df360a8-146c-4e9e-8e52-498553bdf779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,
\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.630898 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.654180 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.670108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.670154 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.670167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.670186 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.670201 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:22Z","lastTransitionTime":"2025-09-29T18:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.671122 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.690906 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.708573 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.724780 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.741988 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.758726 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.772887 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.772921 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.772935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.772955 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.772968 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:22Z","lastTransitionTime":"2025-09-29T18:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.774110 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.796098 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2
acfd8e7e248bc6e9faaf66e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.813531 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.829649 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.851950 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:22Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.876274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.876318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.876333 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.876353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.876368 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:22Z","lastTransitionTime":"2025-09-29T18:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.979375 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.979449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.979469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.979497 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:22 crc kubenswrapper[4792]: I0929 18:57:22.979519 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:22Z","lastTransitionTime":"2025-09-29T18:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.014960 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.015068 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.015014 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.014974 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:23 crc kubenswrapper[4792]: E0929 18:57:23.015212 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:23 crc kubenswrapper[4792]: E0929 18:57:23.015405 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:23 crc kubenswrapper[4792]: E0929 18:57:23.015599 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:23 crc kubenswrapper[4792]: E0929 18:57:23.015657 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.082174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.082226 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.082239 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.082270 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.082289 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:23Z","lastTransitionTime":"2025-09-29T18:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.184461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.184506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.184521 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.184537 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.184548 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:23Z","lastTransitionTime":"2025-09-29T18:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.280759 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:23 crc kubenswrapper[4792]: E0929 18:57:23.281049 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:23 crc kubenswrapper[4792]: E0929 18:57:23.281187 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs podName:fd292349-0e5a-4d80-b163-193aa43c98db nodeName:}" failed. No retries permitted until 2025-09-29 18:57:39.281146147 +0000 UTC m=+71.274453583 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs") pod "network-metrics-daemon-v5b2m" (UID: "fd292349-0e5a-4d80-b163-193aa43c98db") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.287958 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.288029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.288050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.288076 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.288094 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:23Z","lastTransitionTime":"2025-09-29T18:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.392161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.392247 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.392275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.392310 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.392336 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:23Z","lastTransitionTime":"2025-09-29T18:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.497125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.497187 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.497197 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.497221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.497233 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:23Z","lastTransitionTime":"2025-09-29T18:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.613492 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.614413 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.614426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.614449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.614461 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:23Z","lastTransitionTime":"2025-09-29T18:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.717989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.718401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.718555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.718740 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.718994 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:23Z","lastTransitionTime":"2025-09-29T18:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.823800 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.823884 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.823906 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.823933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.823962 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:23Z","lastTransitionTime":"2025-09-29T18:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.929218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.929294 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.929311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.929334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:23 crc kubenswrapper[4792]: I0929 18:57:23.929348 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:23Z","lastTransitionTime":"2025-09-29T18:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.032384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.032443 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.032457 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.032480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.032495 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:24Z","lastTransitionTime":"2025-09-29T18:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.135275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.135323 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.135338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.135361 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.135378 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:24Z","lastTransitionTime":"2025-09-29T18:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.238059 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.238125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.238160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.238196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.238216 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:24Z","lastTransitionTime":"2025-09-29T18:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.340997 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.341058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.341076 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.341107 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.341130 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:24Z","lastTransitionTime":"2025-09-29T18:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.445084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.445204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.445230 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.445260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.445279 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:24Z","lastTransitionTime":"2025-09-29T18:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.549056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.549211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.549236 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.549268 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.549294 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:24Z","lastTransitionTime":"2025-09-29T18:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.653899 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.653946 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.653959 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.653975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.653985 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:24Z","lastTransitionTime":"2025-09-29T18:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.756870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.756933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.756950 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.756974 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.756993 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:24Z","lastTransitionTime":"2025-09-29T18:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.862408 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.862487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.862509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.862538 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.862560 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:24Z","lastTransitionTime":"2025-09-29T18:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.966177 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.966228 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.966240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.966267 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:24 crc kubenswrapper[4792]: I0929 18:57:24.966279 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:24Z","lastTransitionTime":"2025-09-29T18:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.015072 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.015178 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:25 crc kubenswrapper[4792]: E0929 18:57:25.015210 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:25 crc kubenswrapper[4792]: E0929 18:57:25.015447 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.015491 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.015521 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:25 crc kubenswrapper[4792]: E0929 18:57:25.015613 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:25 crc kubenswrapper[4792]: E0929 18:57:25.015666 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.030330 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.052156 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.068371 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.069209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.069364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.069394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.069469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.069542 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.089323 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.111033 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.128152 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.144315 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.162398 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.172766 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.172808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.172819 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.172841 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.172867 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.186419 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.219450 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2
acfd8e7e248bc6e9faaf66e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.243688 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.260681 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.275846 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.275930 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.275949 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.275970 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.275984 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.281441 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.303811 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.319588 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.344385 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 
18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.364986 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df360a8-146c-4e9e-8e52-498553bdf779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.379588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.379639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.379658 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.379685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.379704 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.381615 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.482760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.483365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.483383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.483408 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.483423 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.547216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.547248 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.547259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.547274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.547286 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: E0929 18:57:25.568951 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.574877 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.574955 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.574980 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.575011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.575033 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: E0929 18:57:25.591834 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.596762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.596821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.596839 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.596898 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.596915 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: E0929 18:57:25.618470 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.623606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.623640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.623653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.623677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.623691 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: E0929 18:57:25.643910 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.649348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.649382 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
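
The patch body shown above is a Kubernetes strategic-merge patch: the $setElementOrder/conditions directive pins the ordering of the merged conditions list, and the four conditions carry the node's health verdict. A minimal sketch of how to read such a conditions list (Python; the values are copied from the payload above, and the readiness rule, pressure conditions healthy when "False" and Ready healthy when "True", is the usual kubelet convention, not something stated in this log):

    # Render the four node conditions from a status patch like the one above
    # and flag the condition that keeps the node NotReady. Values are copied
    # from the log payload; nothing here is invented beyond the layout.
    import json

    patch = json.loads('''{"status": {"conditions": [
      {"type": "MemoryPressure", "status": "False", "reason": "KubeletHasSufficientMemory"},
      {"type": "DiskPressure",   "status": "False", "reason": "KubeletHasNoDiskPressure"},
      {"type": "PIDPressure",    "status": "False", "reason": "KubeletHasSufficientPID"},
      {"type": "Ready",          "status": "False", "reason": "KubeletNotReady"}
    ]}}''')

    for cond in patch["status"]["conditions"]:
        # For the pressure conditions "False" is healthy; for Ready, "True" is.
        healthy = (cond["status"] == "False") != (cond["type"] == "Ready")
        mark = "ok" if healthy else "NOT READY"
        print(f'{cond["type"]:15} status={cond["status"]:5} {cond["reason"]} [{mark}]')

Run as written it flags only the Ready condition, which matches the KubeletNotReady entries recorded around each failed patch attempt.
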
event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.649393 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.649415 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.649430 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: E0929 18:57:25.669884 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:25Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:25 crc kubenswrapper[4792]: E0929 18:57:25.670132 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.672217 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
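
All of the patch attempts above fail at the same admission webhook, after which the kubelet gives up for this cycle with "update node status exceeds retry count": the serving certificate behind https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-09-29. A diagnostic sketch along those lines (assumptions: it runs on the node itself, under Python 3 with the third-party cryptography package installed; host and port are taken from the Post URL in the error):

    # Fetch the certificate presented by the node-identity webhook endpoint,
    # skipping verification so an already-expired certificate can still be
    # read, then compare its validity window against the current time.
    import socket
    import ssl
    from datetime import datetime, timezone

    from cryptography import x509  # third-party; assumed installed

    HOST, PORT = "127.0.0.1", 9743  # endpoint from the failed webhook Post above

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE  # do not reject the expired certificate

    with socket.create_connection((HOST, PORT), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=HOST) as tls:
            der = tls.getpeercert(binary_form=True)

    cert = x509.load_der_x509_certificate(der)
    now = datetime.now(timezone.utc)
    print("notBefore:", cert.not_valid_before_utc)  # needs cryptography >= 42
    print("notAfter: ", cert.not_valid_after_utc)
    print("expired:  ", now > cert.not_valid_after_utc)

Against the endpoint in this log the expired check would come back True, matching the x509 error in each retry.
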
event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.672276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.672301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.672350 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.672371 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.775141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.775218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.775237 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.775265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.775284 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.881897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.881973 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.882001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.882035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.882059 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.986106 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.986163 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.986178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.986203 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:25 crc kubenswrapper[4792]: I0929 18:57:25.986220 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:25Z","lastTransitionTime":"2025-09-29T18:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.089794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.090073 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.090185 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.090222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.090245 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:26Z","lastTransitionTime":"2025-09-29T18:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.193600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.193672 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.193690 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.193717 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.193736 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:26Z","lastTransitionTime":"2025-09-29T18:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.297271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.297325 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.297342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.297366 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.297384 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:26Z","lastTransitionTime":"2025-09-29T18:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.402090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.402162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.402181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.402215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.402238 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:26Z","lastTransitionTime":"2025-09-29T18:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.506151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.506220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.506248 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.506282 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.506302 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:26Z","lastTransitionTime":"2025-09-29T18:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.609518 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.609592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.609611 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.609641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.609660 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:26Z","lastTransitionTime":"2025-09-29T18:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.713198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.713268 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.713318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.713343 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.713366 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:26Z","lastTransitionTime":"2025-09-29T18:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.817408 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.817475 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.817490 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.817548 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.817561 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:26Z","lastTransitionTime":"2025-09-29T18:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.920738 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.920804 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.920824 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.920919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:26 crc kubenswrapper[4792]: I0929 18:57:26.920994 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:26Z","lastTransitionTime":"2025-09-29T18:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.014818 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.014818 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.015556 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:27 crc kubenswrapper[4792]: E0929 18:57:27.016054 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:27 crc kubenswrapper[4792]: E0929 18:57:27.016363 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:27 crc kubenswrapper[4792]: E0929 18:57:27.017526 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.024491 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.025649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.025730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.025754 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.025789 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.025819 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:27Z","lastTransitionTime":"2025-09-29T18:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:27 crc kubenswrapper[4792]: E0929 18:57:27.026665 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.130018 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.130082 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.130106 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.130134 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.130156 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:27Z","lastTransitionTime":"2025-09-29T18:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
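
With no CNI configuration present, every pod that needs a fresh sandbox is skipped with the same "network is not ready" error, as in the pod_workers.go entries above. A small sketch for tallying which pods are stuck this way (assumption: the log has been saved locally as kubelet.log; the regular expression matches the pod="..." and podUID="..." field layout exactly as it appears in these lines):

    # Tally "Error syncing pod, skipping" entries per pod in a saved kubelet log.
    import re
    from collections import Counter

    # Field layout copied from the pod_workers.go lines above; finditer copes
    # with archive wrapping that can place several entries on one physical line.
    PAT = re.compile(r'"Error syncing pod, skipping" err="[^"]*" pod="(?P<pod>[^"]+)"')

    skipped = Counter()
    with open("kubelet.log", encoding="utf-8", errors="replace") as fh:
        for line in fh:
            for m in PAT.finditer(line):
                skipped[m.group("pod")] += 1

    for pod, count in skipped.most_common():
        print(f"{count:5d}  {pod}")

On this section it would report network-metrics-daemon-v5b2m, network-check-source, networking-console-plugin, and network-check-target once each.
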
Has your network provider started?"} Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.233749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.233825 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.233843 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.233941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.233969 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:27Z","lastTransitionTime":"2025-09-29T18:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.337201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.337297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.337317 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.337349 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.337370 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:27Z","lastTransitionTime":"2025-09-29T18:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.442653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.442718 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.442733 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.442759 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.442779 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:27Z","lastTransitionTime":"2025-09-29T18:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.546592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.546650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.546659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.546676 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.546687 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:27Z","lastTransitionTime":"2025-09-29T18:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.649899 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.649953 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.649970 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.649991 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.650007 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:27Z","lastTransitionTime":"2025-09-29T18:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.753817 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.753952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.753978 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.754016 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.754040 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:27Z","lastTransitionTime":"2025-09-29T18:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.857686 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.857769 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.857791 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.857828 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.857950 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:27Z","lastTransitionTime":"2025-09-29T18:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.961071 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.961136 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.961149 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.961193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:27 crc kubenswrapper[4792]: I0929 18:57:27.961206 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:27Z","lastTransitionTime":"2025-09-29T18:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.064409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.064460 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.064474 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.064494 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.064508 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:28Z","lastTransitionTime":"2025-09-29T18:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.167162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.167235 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.167253 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.167281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.167303 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:28Z","lastTransitionTime":"2025-09-29T18:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.270480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.270593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.270614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.270643 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.270664 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:28Z","lastTransitionTime":"2025-09-29T18:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.373752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.373787 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.373796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.373810 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.373819 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:28Z","lastTransitionTime":"2025-09-29T18:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.477205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.477283 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.477324 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.477364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.477386 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:28Z","lastTransitionTime":"2025-09-29T18:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.581271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.581339 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.581356 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.581381 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.581399 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:28Z","lastTransitionTime":"2025-09-29T18:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.684401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.684453 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.684471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.684494 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.684648 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:28Z","lastTransitionTime":"2025-09-29T18:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.787504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.787572 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.787591 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.787619 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.787637 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:28Z","lastTransitionTime":"2025-09-29T18:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.891057 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.891108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.891125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.891147 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.891164 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:28Z","lastTransitionTime":"2025-09-29T18:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.994128 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.994184 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.994200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.994222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:28 crc kubenswrapper[4792]: I0929 18:57:28.994237 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:28Z","lastTransitionTime":"2025-09-29T18:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.014649 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.014704 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.014735 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.014669 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:29 crc kubenswrapper[4792]: E0929 18:57:29.014936 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:29 crc kubenswrapper[4792]: E0929 18:57:29.015172 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:29 crc kubenswrapper[4792]: E0929 18:57:29.015408 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:29 crc kubenswrapper[4792]: E0929 18:57:29.015607 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.036543 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static
-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.051365 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.071905 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.096586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.096694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.096716 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.096744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.096764 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:29Z","lastTransitionTime":"2025-09-29T18:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.104656 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2
acfd8e7e248bc6e9faaf66e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.126395 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.143603 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df360a8-146c-4e9e-8e52-498553bdf779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.157703 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.172773 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.182494 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.194779 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.198991 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.199019 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.199027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.199039 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.199047 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:29Z","lastTransitionTime":"2025-09-29T18:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.210835 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.221527 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.234811 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.253359 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.267900 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.282994 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.294161 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:29Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.300763 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.300809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.300819 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.300834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.300845 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:29Z","lastTransitionTime":"2025-09-29T18:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.403455 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.403497 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.403507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.403524 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.403532 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:29Z","lastTransitionTime":"2025-09-29T18:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.505799 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.505839 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.505866 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.505881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.505893 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:29Z","lastTransitionTime":"2025-09-29T18:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.608326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.608374 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.608391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.608458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.608493 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:29Z","lastTransitionTime":"2025-09-29T18:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.711949 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.712014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.712036 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.712061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.712079 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:29Z","lastTransitionTime":"2025-09-29T18:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.813802 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.813844 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.813872 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.813887 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.813898 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:29Z","lastTransitionTime":"2025-09-29T18:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.916306 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.916337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.916349 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.916364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:29 crc kubenswrapper[4792]: I0929 18:57:29.916373 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:29Z","lastTransitionTime":"2025-09-29T18:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:30 crc kubenswrapper[4792]: I0929 18:57:30.019357 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:30 crc kubenswrapper[4792]: I0929 18:57:30.019452 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:30 crc kubenswrapper[4792]: I0929 18:57:30.019469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:30 crc kubenswrapper[4792]: I0929 18:57:30.019485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:30 crc kubenswrapper[4792]: I0929 18:57:30.019542 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:30Z","lastTransitionTime":"2025-09-29T18:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:30 crc kubenswrapper[4792]: I0929 18:57:30.122302 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:30 crc kubenswrapper[4792]: I0929 18:57:30.122353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:30 crc kubenswrapper[4792]: I0929 18:57:30.122367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:30 crc kubenswrapper[4792]: I0929 18:57:30.122383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:30 crc kubenswrapper[4792]: I0929 18:57:30.122399 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:30Z","lastTransitionTime":"2025-09-29T18:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 18:57:31 crc kubenswrapper[4792]: I0929 18:57:31.015310 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:57:31 crc kubenswrapper[4792]: I0929 18:57:31.015353 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:57:31 crc kubenswrapper[4792]: I0929 18:57:31.015310 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:57:31 crc kubenswrapper[4792]: E0929 18:57:31.015441 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:57:31 crc kubenswrapper[4792]: E0929 18:57:31.015561 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:57:31 crc kubenswrapper[4792]: I0929 18:57:31.015560 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:57:31 crc kubenswrapper[4792]: E0929 18:57:31.015751 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:57:31 crc kubenswrapper[4792]: E0929 18:57:31.015785 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:57:33 crc kubenswrapper[4792]: I0929 18:57:33.014956 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:57:33 crc kubenswrapper[4792]: I0929 18:57:33.015019 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:57:33 crc kubenswrapper[4792]: E0929 18:57:33.015105 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:57:33 crc kubenswrapper[4792]: I0929 18:57:33.015105 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:57:33 crc kubenswrapper[4792]: E0929 18:57:33.015153 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:57:33 crc kubenswrapper[4792]: I0929 18:57:33.014976 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:57:33 crc kubenswrapper[4792]: E0929 18:57:33.015341 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:57:33 crc kubenswrapper[4792]: E0929 18:57:33.015439 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.015254 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.015296 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.015261 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.015260 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:35 crc kubenswrapper[4792]: E0929 18:57:35.015472 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:35 crc kubenswrapper[4792]: E0929 18:57:35.015605 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:35 crc kubenswrapper[4792]: E0929 18:57:35.015689 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:35 crc kubenswrapper[4792]: E0929 18:57:35.015776 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.088013 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.088061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.088073 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.088091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.088101 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.190575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.190628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.190641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.190657 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.190669 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.292594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.292638 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.292651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.292667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.292677 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.394263 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.394301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.394309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.394322 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.394331 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.496379 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.496426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.496434 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.496448 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.496459 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.599441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.599508 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.599520 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.599548 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.599566 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.680954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.680990 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.680998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.681011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.681021 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
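The entries above show the kubelet refusing to sync pod sandboxes because no CNI configuration is present in /etc/kubernetes/cni/net.d/. A minimal Go sketch of the same check follows (a hypothetical diagnostic, not part of this log; the directory path is taken from the messages above, and the accepted extensions are the common CNI config suffixes):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path taken from the log message
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	found := 0
	for _, e := range entries {
		// CNI configurations are typically .conf, .conflist, or .json files.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config:", e.Name())
			found++
		}
	}
	if found == 0 {
		// Matches the "no CNI configuration file" condition reported above.
		fmt.Println("no CNI configuration file in", dir, "- network plugin not ready")
	}
}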
Sep 29 18:57:35 crc kubenswrapper[4792]: E0929 18:57:35.700950 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:35Z is after 2025-08-24T17:21:41Z"
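The patch failure above is not a CNI problem but a TLS one: the node.network-node-identity.openshift.io webhook's serving certificate expired on 2025-08-24T17:21:41Z, before the current time 2025-09-29T18:57:35Z, so every node-status patch is rejected. A minimal Go sketch of the same validity check follows (a hypothetical diagnostic, not part of this log; the certificate file path is an assumption):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path to the webhook's PEM-encoded serving certificate.
	pemBytes, err := os.ReadFile("/path/to/webhook-serving-cert.pem")
	if err != nil {
		fmt.Println("read cert:", err)
		return
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		fmt.Println("no PEM block found")
		return
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Println("parse cert:", err)
		return
	}
	now := time.Now()
	if now.After(cert.NotAfter) {
		// Same condition the TLS handshake in the log hit: the current
		// time is past NotAfter (2025-08-24T17:21:41Z in the log).
		fmt.Printf("certificate expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	} else {
		fmt.Println("certificate valid until", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}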
event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.706020 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.706033 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.706043 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: E0929 18:57:35.720190 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:35Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.725305 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.725343 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.725358 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.725379 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.725394 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: E0929 18:57:35.740104 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:35Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.744328 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.744370 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.744390 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.744412 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.744429 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: E0929 18:57:35.760410 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:35Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.764090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.764162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.764179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.764200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.764215 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: E0929 18:57:35.777339 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:35Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:35 crc kubenswrapper[4792]: E0929 18:57:35.777468 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.779772 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.779806 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.779815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.779832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.779843 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.882403 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.882504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.882520 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.882543 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.882557 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.986114 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.986166 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.986182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.986203 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:35 crc kubenswrapper[4792]: I0929 18:57:35.986219 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:35Z","lastTransitionTime":"2025-09-29T18:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.015524 4792 scope.go:117] "RemoveContainer" containerID="1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2"
Sep 29 18:57:36 crc kubenswrapper[4792]: E0929 18:57:36.015885 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.089128 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.089177 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.089189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.089206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.089219 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:36Z","lastTransitionTime":"2025-09-29T18:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.192641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.192704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.192722 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.192747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.192763 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:36Z","lastTransitionTime":"2025-09-29T18:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
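
The "back-off 20s" above is the kubelet's crash-loop back-off for ovnkube-controller: by default the delay starts at 10s and doubles after each failed restart, capped at 5 minutes, so 20s suggests the container has already failed at least twice. A toy sketch of that schedule (the constants are the upstream kubelet defaults, not values read from this cluster):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Kubelet defaults: 10s initial delay, doubled per restart, 5m ceiling.
    	backoff := 10 * time.Second
    	const maxDelay = 5 * time.Minute
    	for restart := 1; restart <= 8; restart++ {
    		fmt.Printf("restart %d: back-off %s\n", restart, backoff)
    		backoff *= 2
    		if backoff > maxDelay {
    			backoff = maxDelay
    		}
    	}
    }
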
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.296009 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.296057 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.296072 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.296093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.296105 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:36Z","lastTransitionTime":"2025-09-29T18:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.398280 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.398334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.398347 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.398368 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.398383 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:36Z","lastTransitionTime":"2025-09-29T18:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.500461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.500515 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.500529 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.500550 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.500566 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:36Z","lastTransitionTime":"2025-09-29T18:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.602700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.602762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.602775 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.602807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.602820 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:36Z","lastTransitionTime":"2025-09-29T18:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.706117 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.706184 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.706208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.706245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.706273 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:36Z","lastTransitionTime":"2025-09-29T18:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.809626 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.809688 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.809715 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.809741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.809757 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:36Z","lastTransitionTime":"2025-09-29T18:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.912660 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.912727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.912741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.912766 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:36 crc kubenswrapper[4792]: I0929 18:57:36.912781 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:36Z","lastTransitionTime":"2025-09-29T18:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.014546 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.014546 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.014551 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.014580 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:57:37 crc kubenswrapper[4792]: E0929 18:57:37.014751 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:57:37 crc kubenswrapper[4792]: E0929 18:57:37.014833 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:57:37 crc kubenswrapper[4792]: E0929 18:57:37.014945 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
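
Every "NetworkReady=false" repetition above has the same proximate cause: nothing has yet written a CNI network configuration into /etc/kubernetes/cni/net.d/, which on this cluster is ovn-kubernetes's job, and ovnkube-controller is the container stuck in the crash loop. A simplified Go sketch of that readiness test (the directory comes from the log; the real kubelet/CRI-O check also parses and validates file contents rather than just globbing):

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    func main() {
    	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
    	var found []string
    	// Accept the usual CNI config extensions; an empty result is what keeps
    	// the runtime reporting NetworkReady=false.
    	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
    		matches, err := filepath.Glob(filepath.Join(confDir, pattern))
    		if err != nil {
    			fmt.Fprintln(os.Stderr, err)
    			os.Exit(1)
    		}
    		found = append(found, matches...)
    	}
    	if len(found) == 0 {
    		fmt.Println("NetworkReady=false: no CNI configuration file found")
    		return
    	}
    	fmt.Println("CNI configuration present:", found)
    }
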
pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:37 crc kubenswrapper[4792]: E0929 18:57:37.015019 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.017801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.017842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.017876 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.017900 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.017917 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:37Z","lastTransitionTime":"2025-09-29T18:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.121011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.121058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.121068 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.121085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.121096 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:37Z","lastTransitionTime":"2025-09-29T18:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.224121 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.224188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.224201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.224223 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.224236 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:37Z","lastTransitionTime":"2025-09-29T18:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.327008 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.327056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.327065 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.327087 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.327099 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:37Z","lastTransitionTime":"2025-09-29T18:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.430098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.430144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.430158 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.430176 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.430188 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:37Z","lastTransitionTime":"2025-09-29T18:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.533875 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.533928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.533945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.533968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.533984 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:37Z","lastTransitionTime":"2025-09-29T18:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.636437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.636478 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.636486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.636518 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.636529 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:37Z","lastTransitionTime":"2025-09-29T18:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.739995 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.740051 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.740060 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.740074 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.740083 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:37Z","lastTransitionTime":"2025-09-29T18:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.842213 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.842252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.842261 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.842291 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.842305 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:37Z","lastTransitionTime":"2025-09-29T18:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.945312 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.945381 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.945392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.945409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:37 crc kubenswrapper[4792]: I0929 18:57:37.945422 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:37Z","lastTransitionTime":"2025-09-29T18:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.047755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.047803 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.047816 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.047834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.047881 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:38Z","lastTransitionTime":"2025-09-29T18:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
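
The five-line block above (four "Recording event message" entries plus one "Node became not ready" condition) repeats roughly every 100ms for as long as the node stays NotReady, so the duration of the outage is easiest to read off programmatically. A purely illustrative Go sketch that scans a saved copy of this log and prints each NotReady heartbeat it finds (the file name and the line shape are assumptions based on this capture):

    package main

    import (
    	"bufio"
    	"fmt"
    	"log"
    	"os"
    	"regexp"
    )

    func main() {
    	// Pull lastHeartbeatTime out of the "Node became not ready" payloads.
    	re := regexp.MustCompile(`"Node became not ready".*"lastHeartbeatTime":"([^"]+)"`)

    	f, err := os.Open("kubelet.log") // illustrative path
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer f.Close()

    	sc := bufio.NewScanner(f)
    	sc.Buffer(make([]byte, 0, 1024*1024), 16*1024*1024) // status-patch lines are huge
    	count := 0
    	for sc.Scan() {
    		if m := re.FindStringSubmatch(sc.Text()); m != nil {
    			count++
    			fmt.Println(m[1])
    		}
    	}
    	if err := sc.Err(); err != nil {
    		log.Fatal(err)
    	}
    	fmt.Printf("%d NotReady heartbeats\n", count)
    }
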
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.150619 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.150666 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.150678 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.150696 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.150709 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:38Z","lastTransitionTime":"2025-09-29T18:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.253145 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.253187 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.253205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.253228 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.253241 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:38Z","lastTransitionTime":"2025-09-29T18:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.355012 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.355051 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.355063 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.355080 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.355091 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:38Z","lastTransitionTime":"2025-09-29T18:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.457458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.457501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.457511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.457527 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.457537 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:38Z","lastTransitionTime":"2025-09-29T18:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.559671 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.559703 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.559711 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.559726 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.559735 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:38Z","lastTransitionTime":"2025-09-29T18:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.661524 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.661561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.661571 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.661588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.661598 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:38Z","lastTransitionTime":"2025-09-29T18:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.763182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.763221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.763230 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.763245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.763254 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:38Z","lastTransitionTime":"2025-09-29T18:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.865173 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.865214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.865225 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.865240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.865250 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:38Z","lastTransitionTime":"2025-09-29T18:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.967105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.967158 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.967170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.967183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:38 crc kubenswrapper[4792]: I0929 18:57:38.967191 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:38Z","lastTransitionTime":"2025-09-29T18:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.014638 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.014671 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.014677 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.014638 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:57:39 crc kubenswrapper[4792]: E0929 18:57:39.014758 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:57:39 crc kubenswrapper[4792]: E0929 18:57:39.014820 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:57:39 crc kubenswrapper[4792]: E0929 18:57:39.014909 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:57:39 crc kubenswrapper[4792]: E0929 18:57:39.014997 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.029525 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.039700 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.049524 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.063976 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.072204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.072245 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.072255 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.072270 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.072279 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:39Z","lastTransitionTime":"2025-09-29T18:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.076513 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.090894 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.104083 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.118010 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.138604 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.150882 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.162923 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df360a8-146c-4e9e-8e52-498553bdf779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.173825 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.173886 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.173897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.173912 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.173924 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:39Z","lastTransitionTime":"2025-09-29T18:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.176955 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.189489 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.199057 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.210020 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.219729 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.228914 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:39Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.275713 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.275750 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.275760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.275779 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.275791 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:39Z","lastTransitionTime":"2025-09-29T18:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.366713 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:39 crc kubenswrapper[4792]: E0929 18:57:39.366879 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:39 crc kubenswrapper[4792]: E0929 18:57:39.366934 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs podName:fd292349-0e5a-4d80-b163-193aa43c98db nodeName:}" failed. No retries permitted until 2025-09-29 18:58:11.366920545 +0000 UTC m=+103.360227941 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs") pod "network-metrics-daemon-v5b2m" (UID: "fd292349-0e5a-4d80-b163-193aa43c98db") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.381320 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.381366 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.381378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.381398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.381412 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:39Z","lastTransitionTime":"2025-09-29T18:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.483492 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.483525 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.483553 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.483568 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.483577 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:39Z","lastTransitionTime":"2025-09-29T18:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.585634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.585669 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.585678 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.585692 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.585702 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:39Z","lastTransitionTime":"2025-09-29T18:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.688558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.688715 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.688742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.688774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.688798 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:39Z","lastTransitionTime":"2025-09-29T18:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.792358 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.792420 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.792439 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.792462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.792480 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:39Z","lastTransitionTime":"2025-09-29T18:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.895539 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.895615 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.895636 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.895659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.895677 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:39Z","lastTransitionTime":"2025-09-29T18:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.998932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.999191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.999287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.999389 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:39 crc kubenswrapper[4792]: I0929 18:57:39.999455 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:39Z","lastTransitionTime":"2025-09-29T18:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.101113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.101160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.101172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.101191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.101204 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:40Z","lastTransitionTime":"2025-09-29T18:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.203014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.203049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.203056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.203070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.203080 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:40Z","lastTransitionTime":"2025-09-29T18:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.305579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.305648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.305667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.305691 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.305707 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:40Z","lastTransitionTime":"2025-09-29T18:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.407626 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.407674 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.407687 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.407705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.407719 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:40Z","lastTransitionTime":"2025-09-29T18:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.510257 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.510306 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.510319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.510339 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.510351 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:40Z","lastTransitionTime":"2025-09-29T18:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.612220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.612266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.612278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.612292 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.612305 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:40Z","lastTransitionTime":"2025-09-29T18:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.714442 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.714744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.714807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.714900 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.714976 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:40Z","lastTransitionTime":"2025-09-29T18:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.817357 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.817397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.817406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.817420 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.817429 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:40Z","lastTransitionTime":"2025-09-29T18:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.920394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.920675 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.920771 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.920879 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:40 crc kubenswrapper[4792]: I0929 18:57:40.920974 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:40Z","lastTransitionTime":"2025-09-29T18:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.015155 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.015197 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.015744 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:41 crc kubenswrapper[4792]: E0929 18:57:41.016008 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.016036 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:41 crc kubenswrapper[4792]: E0929 18:57:41.016199 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:41 crc kubenswrapper[4792]: E0929 18:57:41.016281 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:41 crc kubenswrapper[4792]: E0929 18:57:41.016344 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.022559 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.022585 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.022593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.022603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.022614 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:41Z","lastTransitionTime":"2025-09-29T18:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.124504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.124572 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.124587 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.124604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.124617 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:41Z","lastTransitionTime":"2025-09-29T18:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.226967 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.227015 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.227026 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.227043 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.227055 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:41Z","lastTransitionTime":"2025-09-29T18:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.329425 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.329486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.329503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.329527 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.329545 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:41Z","lastTransitionTime":"2025-09-29T18:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.431390 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.431421 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.431429 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.431441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.431449 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:41Z","lastTransitionTime":"2025-09-29T18:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.533682 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.533718 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.533727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.533740 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.533749 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:41Z","lastTransitionTime":"2025-09-29T18:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.635988 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.636023 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.636036 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.636052 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.636063 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:41Z","lastTransitionTime":"2025-09-29T18:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.738817 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.738870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.738881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.738897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.738906 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:41Z","lastTransitionTime":"2025-09-29T18:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.845155 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.845212 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.845221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.845234 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.845243 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:41Z","lastTransitionTime":"2025-09-29T18:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.947383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.947422 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.947433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.947447 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:41 crc kubenswrapper[4792]: I0929 18:57:41.947458 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:41Z","lastTransitionTime":"2025-09-29T18:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.050328 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.050393 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.050411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.050439 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.050458 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:42Z","lastTransitionTime":"2025-09-29T18:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.152372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.152434 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.152445 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.152461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.152494 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:42Z","lastTransitionTime":"2025-09-29T18:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.254567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.254610 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.254623 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.254642 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.254655 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:42Z","lastTransitionTime":"2025-09-29T18:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.356552 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.356583 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.356593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.356606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.356617 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:42Z","lastTransitionTime":"2025-09-29T18:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.459123 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.459159 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.459169 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.459183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.459193 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:42Z","lastTransitionTime":"2025-09-29T18:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.471833 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5hwvp_100876d3-2539-47f1-91fa-0f91456ccac1/kube-multus/0.log" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.472113 4792 generic.go:334] "Generic (PLEG): container finished" podID="100876d3-2539-47f1-91fa-0f91456ccac1" containerID="3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d" exitCode=1 Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.472211 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5hwvp" event={"ID":"100876d3-2539-47f1-91fa-0f91456ccac1","Type":"ContainerDied","Data":"3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.472707 4792 scope.go:117] "RemoveContainer" containerID="3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.488293 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" 
certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.500636 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.515263 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.533600 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.549025 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:41Z\\\",\\\"message\\\":\\\"2025-09-29T18:56:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049\\\\n2025-09-29T18:56:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049 to /host/opt/cni/bin/\\\\n2025-09-29T18:56:56Z [verbose] multus-daemon started\\\\n2025-09-29T18:56:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T18:57:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.560880 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.560909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.560917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.560945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.560955 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:42Z","lastTransitionTime":"2025-09-29T18:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.564054 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df360a8-146c-4e9e-8e52-498553bdf779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.581096 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.594584 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.603940 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.612834 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.623034 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.631705 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.642545 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.652022 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.663635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.663668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.663677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.663694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.663705 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:42Z","lastTransitionTime":"2025-09-29T18:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.665819 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.677947 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 
2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.688033 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:42Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.766368 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.766406 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.766418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.766457 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.766469 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:42Z","lastTransitionTime":"2025-09-29T18:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.868800 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.868871 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.868885 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.868902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.868914 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:42Z","lastTransitionTime":"2025-09-29T18:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.971247 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.971285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.971293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.971307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:42 crc kubenswrapper[4792]: I0929 18:57:42.971317 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:42Z","lastTransitionTime":"2025-09-29T18:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.015209 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.015256 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.015214 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.015354 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:43 crc kubenswrapper[4792]: E0929 18:57:43.015448 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:43 crc kubenswrapper[4792]: E0929 18:57:43.015556 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:43 crc kubenswrapper[4792]: E0929 18:57:43.015630 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:43 crc kubenswrapper[4792]: E0929 18:57:43.015696 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.073380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.073419 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.073427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.073442 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.073454 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:43Z","lastTransitionTime":"2025-09-29T18:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.175425 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.175466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.175476 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.175495 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.175512 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:43Z","lastTransitionTime":"2025-09-29T18:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.277613 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.277647 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.277658 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.277672 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.277682 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:43Z","lastTransitionTime":"2025-09-29T18:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.379813 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.379877 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.379889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.379902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.379911 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:43Z","lastTransitionTime":"2025-09-29T18:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.476117 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5hwvp_100876d3-2539-47f1-91fa-0f91456ccac1/kube-multus/0.log" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.476171 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5hwvp" event={"ID":"100876d3-2539-47f1-91fa-0f91456ccac1","Type":"ContainerStarted","Data":"e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.481892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.481932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.481942 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.481975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.481987 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:43Z","lastTransitionTime":"2025-09-29T18:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.492200 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:41Z\\\",\\\"message\\\":\\\"2025-09-29T18:56:56+00:00 [cnibincopy] 
Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049\\\\n2025-09-29T18:56:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049 to /host/opt/cni/bin/\\\\n2025-09-29T18:56:56Z [verbose] multus-daemon started\\\\n2025-09-29T18:56:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T18:57:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.507771 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.519100 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.529810 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.544813 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.554023 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.563017 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df360a8-146c-4e9e-8e52-498553bdf779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.573075 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.583659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.583690 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.583698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.583716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.583725 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:43Z","lastTransitionTime":"2025-09-29T18:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.584427 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.593590 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.603632 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.611790 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.619964 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.631455 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.642427 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.652664 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.667933 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reaso
n\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:43Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.686592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.686632 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.686645 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.686664 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.686678 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:43Z","lastTransitionTime":"2025-09-29T18:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.789607 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.789665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.789674 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.789690 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.789699 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:43Z","lastTransitionTime":"2025-09-29T18:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.892125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.892182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.892192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.892205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.892213 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:43Z","lastTransitionTime":"2025-09-29T18:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.995001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.995044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.995058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.995078 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:43 crc kubenswrapper[4792]: I0929 18:57:43.995091 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:43Z","lastTransitionTime":"2025-09-29T18:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.097509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.097531 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.097539 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.097550 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.097558 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:44Z","lastTransitionTime":"2025-09-29T18:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.200143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.200186 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.200229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.200251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.200266 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:44Z","lastTransitionTime":"2025-09-29T18:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.302795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.302831 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.302863 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.302881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.302893 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:44Z","lastTransitionTime":"2025-09-29T18:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.405490 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.405520 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.405532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.405553 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.405561 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:44Z","lastTransitionTime":"2025-09-29T18:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.507920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.507988 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.508009 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.508216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.508261 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:44Z","lastTransitionTime":"2025-09-29T18:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.610993 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.611029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.611037 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.611050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.611058 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:44Z","lastTransitionTime":"2025-09-29T18:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.713523 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.713562 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.713572 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.713587 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.713598 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:44Z","lastTransitionTime":"2025-09-29T18:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.815759 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.815806 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.815819 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.815836 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.815870 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:44Z","lastTransitionTime":"2025-09-29T18:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.918071 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.918319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.918390 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.918458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:44 crc kubenswrapper[4792]: I0929 18:57:44.918547 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:44Z","lastTransitionTime":"2025-09-29T18:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.014440 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.014479 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.014498 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:45 crc kubenswrapper[4792]: E0929 18:57:45.014874 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:45 crc kubenswrapper[4792]: E0929 18:57:45.014738 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.014534 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:45 crc kubenswrapper[4792]: E0929 18:57:45.014981 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:45 crc kubenswrapper[4792]: E0929 18:57:45.015037 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.019941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.019981 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.019993 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.020009 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.020022 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.122223 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.122267 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.122282 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.122299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.122311 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.224320 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.224507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.224565 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.224649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.224738 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.330641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.330686 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.330695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.330709 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.330718 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.433150 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.433178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.433189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.433205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.433215 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.535160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.535220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.535241 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.535262 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.535277 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.637434 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.637470 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.637480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.637493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.637503 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.739433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.739464 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.739471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.739484 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.739492 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.842345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.842380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.842388 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.842422 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.842430 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.861383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.861436 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.861453 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.861477 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.861494 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: E0929 18:57:45.879624 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:45Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.885359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.885612 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.885747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.885919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.886075 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: E0929 18:57:45.906202 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:45Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.910806 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.910828 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.910836 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.910863 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.910875 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: E0929 18:57:45.928900 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:45Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.934130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.934193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.934216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.934246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.934267 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: E0929 18:57:45.949936 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:45Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.954540 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.954580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.954596 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.954620 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.954637 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:45 crc kubenswrapper[4792]: E0929 18:57:45.973326 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:45Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:45 crc kubenswrapper[4792]: E0929 18:57:45.973532 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.975056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.975085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.975096 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.975114 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:45 crc kubenswrapper[4792]: I0929 18:57:45.975129 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:45Z","lastTransitionTime":"2025-09-29T18:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.077706 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.078118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.078252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.078450 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.078640 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:46Z","lastTransitionTime":"2025-09-29T18:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.181505 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.181812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.181983 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.182093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.182171 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:46Z","lastTransitionTime":"2025-09-29T18:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.284924 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.284985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.285007 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.285038 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.285060 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:46Z","lastTransitionTime":"2025-09-29T18:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.387759 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.387790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.387798 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.387810 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.387819 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:46Z","lastTransitionTime":"2025-09-29T18:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.490528 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.490831 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.491142 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.491339 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.491484 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:46Z","lastTransitionTime":"2025-09-29T18:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.594110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.594157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.594171 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.594189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.594203 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:46Z","lastTransitionTime":"2025-09-29T18:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.696272 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.696307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.696317 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.696332 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.696341 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:46Z","lastTransitionTime":"2025-09-29T18:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.799734 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.799800 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.799832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.799895 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.799918 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:46Z","lastTransitionTime":"2025-09-29T18:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.902881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.902946 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.902975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.903002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:46 crc kubenswrapper[4792]: I0929 18:57:46.903024 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:46Z","lastTransitionTime":"2025-09-29T18:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.005143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.005186 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.005202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.005220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.005233 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:47Z","lastTransitionTime":"2025-09-29T18:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.014482 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.014529 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:47 crc kubenswrapper[4792]: E0929 18:57:47.014633 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.014482 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:47 crc kubenswrapper[4792]: E0929 18:57:47.014736 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:47 crc kubenswrapper[4792]: E0929 18:57:47.014888 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.015080 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:47 crc kubenswrapper[4792]: E0929 18:57:47.015173 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.108200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.108260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.108285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.108316 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.108340 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:47Z","lastTransitionTime":"2025-09-29T18:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.211299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.211529 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.211615 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.211704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.211781 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:47Z","lastTransitionTime":"2025-09-29T18:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.315507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.315544 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.315561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.315581 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.315597 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:47Z","lastTransitionTime":"2025-09-29T18:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.418757 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.419316 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.419727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.420269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.420780 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:47Z","lastTransitionTime":"2025-09-29T18:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.525478 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.525550 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.525573 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.525599 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.525620 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:47Z","lastTransitionTime":"2025-09-29T18:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.628275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.629026 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.629063 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.629092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.629135 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:47Z","lastTransitionTime":"2025-09-29T18:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.731558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.731590 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.731600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.731614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.731622 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:47Z","lastTransitionTime":"2025-09-29T18:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.834951 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.835011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.835027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.835050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.835080 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:47Z","lastTransitionTime":"2025-09-29T18:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.938162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.938198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.938209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.938224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:47 crc kubenswrapper[4792]: I0929 18:57:47.938236 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:47Z","lastTransitionTime":"2025-09-29T18:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.040756 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.040790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.040802 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.040818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.040828 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:48Z","lastTransitionTime":"2025-09-29T18:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.142596 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.142730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.142749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.142774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.142792 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:48Z","lastTransitionTime":"2025-09-29T18:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.244752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.244785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.244794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.244807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.244815 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:48Z","lastTransitionTime":"2025-09-29T18:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.346433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.346470 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.346479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.346494 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.346503 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:48Z","lastTransitionTime":"2025-09-29T18:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.448954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.449024 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.449049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.449075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.449092 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:48Z","lastTransitionTime":"2025-09-29T18:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.551983 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.552047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.552064 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.552087 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.552107 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:48Z","lastTransitionTime":"2025-09-29T18:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.654238 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.655061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.655222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.655364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.655508 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:48Z","lastTransitionTime":"2025-09-29T18:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.759409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.759473 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.759496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.759523 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.759544 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:48Z","lastTransitionTime":"2025-09-29T18:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.863966 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.864007 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.864018 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.864034 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.864046 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:48Z","lastTransitionTime":"2025-09-29T18:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.966758 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.966824 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.966889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.966920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:48 crc kubenswrapper[4792]: I0929 18:57:48.966944 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:48Z","lastTransitionTime":"2025-09-29T18:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.015089 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:49 crc kubenswrapper[4792]: E0929 18:57:49.015739 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.015168 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:49 crc kubenswrapper[4792]: E0929 18:57:49.016253 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.015096 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:49 crc kubenswrapper[4792]: E0929 18:57:49.016742 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.015264 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:49 crc kubenswrapper[4792]: E0929 18:57:49.017500 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.030749 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.055006 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.071058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.071173 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.071202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.071282 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.071353 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:49Z","lastTransitionTime":"2025-09-29T18:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.071798 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.095300 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.118536 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.132363 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.151541 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.165703 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:41Z\\\",\\\"message\\\":\\\"2025-09-29T18:56:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049\\\\n2025-09-29T18:56:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049 to 
/host/opt/cni/bin/\\\\n2025-09-29T18:56:56Z [verbose] multus-daemon started\\\\n2025-09-29T18:56:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T18:57:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.174783 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.174857 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.174872 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.174888 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.174900 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:49Z","lastTransitionTime":"2025-09-29T18:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.188028 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"qu
ay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.203283 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.217351 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.227652 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.241408 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.254173 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df360a8-146c-4e9e-8e52-498553bdf779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.267010 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.277214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.277426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.277535 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.277648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.277747 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:49Z","lastTransitionTime":"2025-09-29T18:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.283001 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.294345 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:49Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.380600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.380635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.380646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.380662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.380672 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:49Z","lastTransitionTime":"2025-09-29T18:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.483165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.483211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.483230 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.483254 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.483273 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:49Z","lastTransitionTime":"2025-09-29T18:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.585278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.585318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.585326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.585340 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.585359 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:49Z","lastTransitionTime":"2025-09-29T18:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.688895 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.689816 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.689985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.690131 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.690161 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:49Z","lastTransitionTime":"2025-09-29T18:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.793589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.793651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.793675 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.793704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.793730 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:49Z","lastTransitionTime":"2025-09-29T18:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.896411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.896503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.896521 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.896544 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.896562 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:49Z","lastTransitionTime":"2025-09-29T18:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.999271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.999362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.999381 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.999404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:49 crc kubenswrapper[4792]: I0929 18:57:49.999424 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:49Z","lastTransitionTime":"2025-09-29T18:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.101441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.101498 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.101517 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.101540 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.101558 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:50Z","lastTransitionTime":"2025-09-29T18:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.204967 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.205006 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.205017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.205032 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.205044 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:50Z","lastTransitionTime":"2025-09-29T18:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.307606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.307679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.307703 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.307732 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.307755 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:50Z","lastTransitionTime":"2025-09-29T18:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.410532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.410568 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.410578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.410592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.410602 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:50Z","lastTransitionTime":"2025-09-29T18:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.513762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.513840 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.513897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.513924 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.513942 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:50Z","lastTransitionTime":"2025-09-29T18:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.616778 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.616821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.616830 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.616845 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.616869 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:50Z","lastTransitionTime":"2025-09-29T18:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.720398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.720447 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.720462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.720481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.720495 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:50Z","lastTransitionTime":"2025-09-29T18:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.823134 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.823205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.823223 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.823248 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.823265 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:50Z","lastTransitionTime":"2025-09-29T18:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.926485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.926557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.926575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.926599 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:50 crc kubenswrapper[4792]: I0929 18:57:50.926616 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:50Z","lastTransitionTime":"2025-09-29T18:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.015023 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.015174 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:57:51 crc kubenswrapper[4792]: E0929 18:57:51.015397 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.015417 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.015506 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:57:51 crc kubenswrapper[4792]: E0929 18:57:51.015772 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:57:51 crc kubenswrapper[4792]: E0929 18:57:51.016261 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.016666 4792 scope.go:117] "RemoveContainer" containerID="1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2" Sep 29 18:57:51 crc kubenswrapper[4792]: E0929 18:57:51.016566 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.029808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.029925 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.029941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.029963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.029982 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:51Z","lastTransitionTime":"2025-09-29T18:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.134989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.135476 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.136058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.136265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.136412 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:51Z","lastTransitionTime":"2025-09-29T18:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.240327 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.240374 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.240384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.240404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.240416 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:51Z","lastTransitionTime":"2025-09-29T18:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.344086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.344161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.344183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.344210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.344233 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:51Z","lastTransitionTime":"2025-09-29T18:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.448416 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.448492 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.448518 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.448547 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.448571 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:51Z","lastTransitionTime":"2025-09-29T18:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.509582 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/2.log" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.512962 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"} Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.513443 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.537514 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86
782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 
18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.551368 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.551407 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.551419 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.551439 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:51 
crc kubenswrapper[4792]: I0929 18:57:51.551455 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:51Z","lastTransitionTime":"2025-09-29T18:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.566203 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.584842 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.605816 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", 
E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.635097 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:41Z\\\",\\\"message\\\":\\\"2025-09-29T18:56:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to 
/host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049\\\\n2025-09-29T18:56:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049 to /host/opt/cni/bin/\\\\n2025-09-29T18:56:56Z [verbose] multus-daemon started\\\\n2025-09-29T18:56:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T18:57:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.650944 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df360a8-146c-4e9e-8e52-498553bdf779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.653635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.653693 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.653706 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.653725 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.653739 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:51Z","lastTransitionTime":"2025-09-29T18:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.670341 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.691148 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.702298 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.720227 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.739728 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.756585 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.756635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.756651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.756675 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.756693 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:51Z","lastTransitionTime":"2025-09-29T18:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.791553 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.809329 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.821054 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.833168 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.850876 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reaso
n\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.859018 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.859092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.859111 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.859136 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.859153 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:51Z","lastTransitionTime":"2025-09-29T18:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.864979 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:51Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.962034 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.962118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.962148 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.962179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:51 crc kubenswrapper[4792]: I0929 18:57:51.962196 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:51Z","lastTransitionTime":"2025-09-29T18:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.519155 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/3.log"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.520070 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/2.log"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.523317 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386" exitCode=1
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.523393 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"}
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.523464 4792 scope.go:117] "RemoveContainer" containerID="1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.526197 4792 scope.go:117] "RemoveContainer" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.526455 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.540113 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.555316 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.573603 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.582241 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.582626 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.582776 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.582945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.583360 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:52Z","lastTransitionTime":"2025-09-29T18:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.593521 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.610428 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.635873 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.648840 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.663766 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.676348 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.686459 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.686536 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.686558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.686589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.686609 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:52Z","lastTransitionTime":"2025-09-29T18:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.694288 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.715898 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1121dcf30430fdbc7f6353dacdf1f0233d9053a2acfd8e7e248bc6e9faaf66e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:20Z\\\",\\\"message\\\":\\\".861680 6354 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:20Z is after 2025-08-24T17:21:41Z]\\\\nI0929 18:57:20.861689 6354 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:20.861690 6354 services_controller.go:451] Built service openshift-kube-scheduler-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", E\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:52Z\\\",\\\"message\\\":\\\":\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, 
Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 18:57:52.076409 6735 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5\\\\nI0929 18:57:52.078176 6735 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI0929 18:57:52.078181 6735 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5\\\\nI0929 18:57:52.078190 6735 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0929 18:57:52.078244 6735 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:52.077957 6735 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-rqbjv after 0 failed attempt(s)\\\\nI0929 18:57:52.078272 6735 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-rqbjv\\\\nI0929 18:57:52.078267 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sh
a256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.732523 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:41Z\\\",\\\"message\\\":\\\"2025-09-29T18:56:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049\\\\n2025-09-29T18:56:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049 to /host/opt/cni/bin/\\\\n2025-09-29T18:56:56Z [verbose] multus-daemon started\\\\n2025-09-29T18:56:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T18:57:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.747680 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df360a8-146c-4e9e-8e52-498553bdf779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.762726 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.778647 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.789529 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.789612 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.789636 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.789680 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.789708 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:52Z","lastTransitionTime":"2025-09-29T18:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.794374 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.810141 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:52Z is after 2025-08-24T17:21:41Z" Sep 29 
18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.815586 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.815877 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.815832548 +0000 UTC m=+148.809139934 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.816007 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.816204 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.816309 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.816282671 +0000 UTC m=+148.809590107 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.893117 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.893583 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.893596 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.893617 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.894048 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:52Z","lastTransitionTime":"2025-09-29T18:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.916887 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.916939 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.916977 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.917100 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.917168 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.917151251 +0000 UTC m=+148.910458647 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.917168 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.917106 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.917208 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.917228 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.917244 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.917248 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.917277 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.917268225 +0000 UTC m=+148.910575621 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 18:57:52 crc kubenswrapper[4792]: E0929 18:57:52.917342 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.917304566 +0000 UTC m=+148.910612162 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.997303 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.997422 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.997450 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.997482 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:52 crc kubenswrapper[4792]: I0929 18:57:52.997503 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:52Z","lastTransitionTime":"2025-09-29T18:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.014460 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.014549 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:57:53 crc kubenswrapper[4792]: E0929 18:57:53.014624 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.014556 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.014697 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:57:53 crc kubenswrapper[4792]: E0929 18:57:53.014756 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:57:53 crc kubenswrapper[4792]: E0929 18:57:53.014794 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:57:53 crc kubenswrapper[4792]: E0929 18:57:53.014993 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.101080 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.101130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.101179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.101210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.101222 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:53Z","lastTransitionTime":"2025-09-29T18:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.202829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.202875 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.202885 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.202898 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.202908 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:53Z","lastTransitionTime":"2025-09-29T18:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.306398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.306458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.306468 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.306485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.306497 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:53Z","lastTransitionTime":"2025-09-29T18:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.409122 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.409170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.409180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.409195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.409204 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:53Z","lastTransitionTime":"2025-09-29T18:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.511372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.511424 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.511435 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.511451 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.511463 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:53Z","lastTransitionTime":"2025-09-29T18:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.526781 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/3.log" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.531069 4792 scope.go:117] "RemoveContainer" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386" Sep 29 18:57:53 crc kubenswrapper[4792]: E0929 18:57:53.531216 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.545690 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.555082 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4gmtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b84b9e91-b50e-4271-bfc8-be15652128c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b32084075b7423c8211ca56595a2eb11add581b500043804cb09f13d07788bd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc999\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4gmtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.567219 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49187618-8fed-4b0f-bdf8-800408f708fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://325b543480e9e1abd49c6ce98398a79ef51983b8035774b2e88447ee547733c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12d3875b8db9620798f766024b1bc43b78759f42e467b67aaf87f0b0154a8fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://100ab44da711fddded7f88aa053b6a47d1c8302557d9ae6a56d4f744140e34bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af34e705a941f92c031edf3d214a902640010036401914f60e598a46043d5eb3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.580567 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb29207afd9a5fb06242890aaf6d32f2f789cbf824b0246706e7214486ac529c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.590577 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ae66548-086e-4ca9-bd6f-281ce46e7557\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b27d8e307d9f6545acd48d9a838dc98fec84ca2e48b357966af22144b8cd415f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p5q59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.611355 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67c58ee5-e056-4e3e-91ed-a116350f2408\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17470ef608c6b717c0346349c1e72046e200b3879df2772778878b0e83c05b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdd799a430b3a444b7ba74ae8c285de28790049390a462485812fe117f9dfbe8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d779fed3cf67ba40d6664f26d829858ec14749c48c09678b73d7fb8fe73c827\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3094a1172df2fd98e699c4d368a14584e51bce43389c9c6432e24e78d460a3eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79dfa5c03ec31df7b6477646c437b7490658801c0b8f7fac5e9149e4c7a882bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aed6e427a87d4a4617a9d1c9a4d37cf2f9815d1759336026545d563b1f9b6976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0830e9f46c282fc2be6beeea2654758eb0b3a0a86b802f495928c846bc49f7ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:57:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ms9xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rqbjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.613646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.613692 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.613705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.613730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.613756 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:53Z","lastTransitionTime":"2025-09-29T18:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.622172 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd292349-0e5a-4d80-b163-193aa43c98db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8ps7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v5b2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.635280 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10bc9cb6-78d3-43a6-8276-db1cb1c116e0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238035b6ad975064a44e7e9e760ae9f09c9ff2735ecc69f65df3fc3176f6d3b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://659f798faed7d5c35bf7959b8e42a37f2289854714d513962716cb73a0674a27\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38d29fa86782b007a60cbc1ea8f2ccc250dabb38c84eb61931b05fbc170e6538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f292b6e3b4a31cc851066c3112871836e2c896dea8913da0d3c5579fe5ebb65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fdb8c0687f0a5ce31078f6d7a9b643c41ad23199eff4b2878403ee5fd31f69f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T18:56:45Z\\\",\\\"message\\\":\\\"9 18:56:45.310775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 18:56:45.310981 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0929 18:56:45.312802 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312836 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0929 18:56:45.312870 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312900 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0929 18:56:45.312941 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0929 18:56:45.312944 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0929 18:56:45.312985 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0929 18:56:45.313033 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0929 18:56:45.313121 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\"\\\\nI0929 18:56:45.313163 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-682747971/tls.crt::/tmp/serving-cert-682747971/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759172204\\\\\\\\\\\\\\\" (2025-09-29 18:56:43 +0000 UTC to 2025-10-29 18:56:44 +0000 UTC (now=2025-09-29 18:56:45.313121069 +0000 UTC))\\\\\\\"\\\\nF0929 18:56:45.313206 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07171a8c3c7812c016ee534ff1332f697d2b2cdfa70fc9d94ae6a5f312e0e433\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e733b73d0293211bf2e8e97dc7db49c34e8ac1ef1e6e19013183d8518345959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.646362 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba8b9fef5faf6504a0e363f092cc9f60b03723775a0a0624b6302b3dac43a7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.657959 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8067c4cf598ce2d361c4a76b51ef3cf14d1fc84ad7ee193d76e20cd980f197be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693ef3ee15f0b8762a16adc20435397e073dea4b0028f4175899cb956eaab303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.674381 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"716c5fdd-0e02-4066-9210-93d805b6fe81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:52Z\\\",\\\"message\\\":\\\":\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 18:57:52.076409 6735 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5\\\\nI0929 18:57:52.078176 6735 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI0929 18:57:52.078181 6735 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5\\\\nI0929 18:57:52.078190 6735 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0929 18:57:52.078244 6735 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI0929 18:57:52.077957 6735 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-rqbjv after 0 failed attempt(s)\\\\nI0929 18:57:52.078272 6735 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-rqbjv\\\\nI0929 18:57:52.078267 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:57:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-72xxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hr4cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.699291 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5hwvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"100876d3-2539-47f1-91fa-0f91456ccac1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T18:57:41Z\\\",\\\"message\\\":\\\"2025-09-29T18:56:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049\\\\n2025-09-29T18:56:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd172c3-8b42-4b95-9328-75a122f6b049 to 
/host/opt/cni/bin/\\\\n2025-09-29T18:56:56Z [verbose] multus-daemon started\\\\n2025-09-29T18:56:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T18:57:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfblz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5hwvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.709434 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df360a8-146c-4e9e-8e52-498553bdf779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f4bce20bf980ce88d41df2f3c40acfc93739122e3173ea15ceb0122219338f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e3b84a17bd084dfa82505af7e250aea64057db67fdf494d8653ba30a883b6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31040d1362524ea1a3ee9961c3eb7b97c7c76709465df7590129430ddd3bb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db5ac0d0d3d08ebd2278af02df1e4639df5c635bfee2ec6fb8293e18c648af76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T18:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T18:56:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.716404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.716437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.716445 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.716461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.716473 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:53Z","lastTransitionTime":"2025-09-29T18:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.722242 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.736031 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.745031 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c228l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc16dcda-372e-4aac-8c12-148bf93e8783\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af2529526423852e215c3201a4d8807a880e07e9cf71d593f304a4a3c99900eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mz5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c228l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.754973 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1762a3e4-6068-48d9-9b1d-bd5b893803bb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d6bc8edd4388d2cc81a9741b23060a4982a85a1ac8ec23f2052436cff8cd7fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8053630e82cf267f19de300922033d2cade4b754707ca7c0d7fbfd7e4957eefc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T18:57:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-xnnkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T18:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rr4g5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:53Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.818603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.818642 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.818651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.818665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.818675 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:53Z","lastTransitionTime":"2025-09-29T18:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.920961 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.921007 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.921017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.921033 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:53 crc kubenswrapper[4792]: I0929 18:57:53.921044 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:53Z","lastTransitionTime":"2025-09-29T18:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.023405 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.023456 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.023465 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.023475 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.023484 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:54Z","lastTransitionTime":"2025-09-29T18:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.125058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.125094 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.125104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.125118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.125130 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:54Z","lastTransitionTime":"2025-09-29T18:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.226907 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.226947 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.226958 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.226974 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.226985 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:54Z","lastTransitionTime":"2025-09-29T18:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.329203 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.329253 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.329263 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.329279 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.329291 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:54Z","lastTransitionTime":"2025-09-29T18:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.431282 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.431327 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.431339 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.431360 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.431374 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:54Z","lastTransitionTime":"2025-09-29T18:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.532894 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.532929 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.532938 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.532951 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.532960 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:54Z","lastTransitionTime":"2025-09-29T18:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.634805 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.634833 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.634842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.634870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.634879 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:54Z","lastTransitionTime":"2025-09-29T18:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.737429 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.737462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.737471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.737488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.737497 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:54Z","lastTransitionTime":"2025-09-29T18:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.839843 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.839893 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.839906 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.839920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.839931 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:54Z","lastTransitionTime":"2025-09-29T18:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.942968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.943220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.943292 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.943372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:54 crc kubenswrapper[4792]: I0929 18:57:54.943434 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:54Z","lastTransitionTime":"2025-09-29T18:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.015353 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.015438 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:57:55 crc kubenswrapper[4792]: E0929 18:57:55.015476 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.015491 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.015525 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:57:55 crc kubenswrapper[4792]: E0929 18:57:55.015582 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:57:55 crc kubenswrapper[4792]: E0929 18:57:55.015657 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:57:55 crc kubenswrapper[4792]: E0929 18:57:55.015721 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.045898 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.045932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.045941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.045955 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.045965 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:55Z","lastTransitionTime":"2025-09-29T18:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.148437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.148726 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.148889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.148987 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.149077 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:55Z","lastTransitionTime":"2025-09-29T18:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.251160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.251196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.251204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.251216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.251224 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:55Z","lastTransitionTime":"2025-09-29T18:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.353527 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.353795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.353937 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.354031 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.354114 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:55Z","lastTransitionTime":"2025-09-29T18:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.456654 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.456949 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.457057 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.457140 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.457217 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:55Z","lastTransitionTime":"2025-09-29T18:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.560586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.561015 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.561170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.561365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.561795 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:55Z","lastTransitionTime":"2025-09-29T18:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.665705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.666229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.666443 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.666596 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.666732 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:55Z","lastTransitionTime":"2025-09-29T18:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.770896 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.772023 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.772057 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.772082 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.772100 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:55Z","lastTransitionTime":"2025-09-29T18:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.876092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.876171 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.876195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.876223 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.876244 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:55Z","lastTransitionTime":"2025-09-29T18:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.979141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.979180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.979189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.979205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:55 crc kubenswrapper[4792]: I0929 18:57:55.979217 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:55Z","lastTransitionTime":"2025-09-29T18:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.036167 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.082915 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.083351 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.083546 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.083899 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.084170 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.188053 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.188189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.188209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.188240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.188295 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.292353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.292454 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.292472 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.292501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.292522 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.320590 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.321110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.321322 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.321599 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.321757 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:57:56 crc kubenswrapper[4792]: E0929 18:57:56.344711 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.350935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.351010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.351034 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.351068 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.351090 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:56 crc kubenswrapper[4792]: E0929 18:57:56.373629 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.381895 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.381959 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.381979 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.382011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.382032 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:56 crc kubenswrapper[4792]: E0929 18:57:56.405514 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.411417 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.411909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.412433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.412643 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.412842 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:56 crc kubenswrapper[4792]: E0929 18:57:56.428724 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:56Z is after 2025-08-24T17:21:41Z" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.433341 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.433483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.433585 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.433671 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.433754 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:57:56 crc kubenswrapper[4792]: E0929 18:57:56.446581 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T18:57:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2b56982a-4dd9-4681-8997-0d414fe55985\\\",\\\"systemUUID\\\":\\\"798197c6-3029-4938-8b57-256852c71a3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T18:57:56Z is after 2025-08-24T17:21:41Z"
Sep 29 18:57:56 crc kubenswrapper[4792]: E0929 18:57:56.446722 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.448522 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.448567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.448578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.448595 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.448606 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.551999 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.552070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.552088 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.552114 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.552133 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.655488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.655528 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.655541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.655557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.655568 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.758451 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.758493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.758504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.758520 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.758531 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.861271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.861517 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.861557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.861593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.861616 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.964586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.964643 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.964658 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.964677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:56 crc kubenswrapper[4792]: I0929 18:57:56.964689 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:56Z","lastTransitionTime":"2025-09-29T18:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.015231 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:57:57 crc kubenswrapper[4792]: E0929 18:57:57.015389 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.015659 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.015742 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:57:57 crc kubenswrapper[4792]: E0929 18:57:57.015797 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.015273 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:57:57 crc kubenswrapper[4792]: E0929 18:57:57.016111 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:57:57 crc kubenswrapper[4792]: E0929 18:57:57.016225 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.066672 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.066975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.067101 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.067194 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.067274 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:57Z","lastTransitionTime":"2025-09-29T18:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.169737 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.169767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.169776 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.169790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.169814 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:57Z","lastTransitionTime":"2025-09-29T18:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.272266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.272333 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.272351 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.272649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.272689 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:57Z","lastTransitionTime":"2025-09-29T18:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.375632 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.375677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.375692 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.375710 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.375727 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:57Z","lastTransitionTime":"2025-09-29T18:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.482741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.483153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.483400 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.483646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.483803 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:57Z","lastTransitionTime":"2025-09-29T18:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.588627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.588682 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.588700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.588724 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.588741 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:57Z","lastTransitionTime":"2025-09-29T18:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.691985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.692039 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.692056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.692079 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.692096 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:57Z","lastTransitionTime":"2025-09-29T18:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.795579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.795628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.795640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.795659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.795671 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:57Z","lastTransitionTime":"2025-09-29T18:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.902246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.902278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.902287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.902301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:57 crc kubenswrapper[4792]: I0929 18:57:57.902324 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:57Z","lastTransitionTime":"2025-09-29T18:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.004870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.005653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.005682 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.005701 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.005715 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:58Z","lastTransitionTime":"2025-09-29T18:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.025601 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.112165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.112349 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.112367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.112409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.112424 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:58Z","lastTransitionTime":"2025-09-29T18:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.215487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.215537 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.215555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.215577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.215593 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:58Z","lastTransitionTime":"2025-09-29T18:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.318988 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.319045 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.319061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.319085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.319101 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:58Z","lastTransitionTime":"2025-09-29T18:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.421588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.421638 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.421651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.421668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.421680 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:58Z","lastTransitionTime":"2025-09-29T18:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.524369 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.524413 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.524423 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.524435 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.524445 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:58Z","lastTransitionTime":"2025-09-29T18:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.626390 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.626432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.626444 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.626461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.626472 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:58Z","lastTransitionTime":"2025-09-29T18:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.728400 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.728435 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.728445 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.728460 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.728472 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:58Z","lastTransitionTime":"2025-09-29T18:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.831287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.831318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.831326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.831338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.831346 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:58Z","lastTransitionTime":"2025-09-29T18:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.934315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.934369 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.934380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.934397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:58 crc kubenswrapper[4792]: I0929 18:57:58.934408 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:58Z","lastTransitionTime":"2025-09-29T18:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.014358 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:57:59 crc kubenswrapper[4792]: E0929 18:57:59.014515 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.014529 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.014583 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:57:59 crc kubenswrapper[4792]: E0929 18:57:59.014665 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:57:59 crc kubenswrapper[4792]: E0929 18:57:59.014749 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.014951 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:57:59 crc kubenswrapper[4792]: E0929 18:57:59.015049 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.036740 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.036778 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.036790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.036806 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.036815 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:59Z","lastTransitionTime":"2025-09-29T18:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.118353 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-5hwvp" podStartSLOduration=66.118337901 podStartE2EDuration="1m6.118337901s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.100528973 +0000 UTC m=+91.093836399" watchObservedRunningTime="2025-09-29 18:57:59.118337901 +0000 UTC m=+91.111645297"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.118867 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=66.118845156 podStartE2EDuration="1m6.118845156s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.11865826 +0000 UTC m=+91.111965656" watchObservedRunningTime="2025-09-29 18:57:59.118845156 +0000 UTC m=+91.112152552"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.138547 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.138593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.138602 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.138616 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.138626 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:59Z","lastTransitionTime":"2025-09-29T18:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.192012 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-c228l" podStartSLOduration=67.191991674 podStartE2EDuration="1m7.191991674s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.173828826 +0000 UTC m=+91.167136242" watchObservedRunningTime="2025-09-29 18:57:59.191991674 +0000 UTC m=+91.185299090"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.210913 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rr4g5" podStartSLOduration=66.210888914 podStartE2EDuration="1m6.210888914s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.191988424 +0000 UTC m=+91.185295850" watchObservedRunningTime="2025-09-29 18:57:59.210888914 +0000 UTC m=+91.204196330"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.211119 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=37.211112821 podStartE2EDuration="37.211112821s" podCreationTimestamp="2025-09-29 18:57:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.210282686 +0000 UTC m=+91.203590092" watchObservedRunningTime="2025-09-29 18:57:59.211112821 +0000 UTC m=+91.204420227"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.241331 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.241714 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.241829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.241952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.242029 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:59Z","lastTransitionTime":"2025-09-29T18:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.248276 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-4gmtk" podStartSLOduration=67.248260132 podStartE2EDuration="1m7.248260132s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.247540851 +0000 UTC m=+91.240848277" watchObservedRunningTime="2025-09-29 18:57:59.248260132 +0000 UTC m=+91.241567528"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.258051 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=1.258029112 podStartE2EDuration="1.258029112s" podCreationTimestamp="2025-09-29 18:57:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.257986421 +0000 UTC m=+91.251293827" watchObservedRunningTime="2025-09-29 18:57:59.258029112 +0000 UTC m=+91.251336538"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.284907 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=3.284885298 podStartE2EDuration="3.284885298s" podCreationTimestamp="2025-09-29 18:57:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.284058974 +0000 UTC m=+91.277366400" watchObservedRunningTime="2025-09-29 18:57:59.284885298 +0000 UTC m=+91.278192694"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.311708 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podStartSLOduration=67.311689363 podStartE2EDuration="1m7.311689363s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.297553824 +0000 UTC m=+91.290861210" watchObservedRunningTime="2025-09-29 18:57:59.311689363 +0000 UTC m=+91.304996759"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.312220 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-rqbjv" podStartSLOduration=66.312214898 podStartE2EDuration="1m6.312214898s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.312066054 +0000 UTC m=+91.305373450" watchObservedRunningTime="2025-09-29 18:57:59.312214898 +0000 UTC m=+91.305522294"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.333954 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=70.333937612 podStartE2EDuration="1m10.333937612s" podCreationTimestamp="2025-09-29 18:56:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:57:59.333461698 +0000 UTC m=+91.326769094" watchObservedRunningTime="2025-09-29 18:57:59.333937612 +0000 UTC m=+91.327245008"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.343446 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.343483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.343493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.343507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.343518 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:59Z","lastTransitionTime":"2025-09-29T18:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.446078 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.446124 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.446139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.446159 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.446173 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:59Z","lastTransitionTime":"2025-09-29T18:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.548141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.548218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.548229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.548243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.548256 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:59Z","lastTransitionTime":"2025-09-29T18:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.649941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.650210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.650220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.650238 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.650249 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:59Z","lastTransitionTime":"2025-09-29T18:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.752772 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.753073 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.753189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.753290 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.753379 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:59Z","lastTransitionTime":"2025-09-29T18:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.855683 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.855740 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.855758 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.855779 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.855794 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:59Z","lastTransitionTime":"2025-09-29T18:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.960002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.961150 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.961359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.961556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:57:59 crc kubenswrapper[4792]: I0929 18:57:59.961709 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:57:59Z","lastTransitionTime":"2025-09-29T18:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.065040 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.065090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.065104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.065122 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.065136 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:00Z","lastTransitionTime":"2025-09-29T18:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.169698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.169767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.169789 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.169947 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.169977 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:00Z","lastTransitionTime":"2025-09-29T18:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.272277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.272322 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.272359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.272376 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.272386 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:00Z","lastTransitionTime":"2025-09-29T18:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.375565 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.375624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.375638 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.375661 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.375676 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:00Z","lastTransitionTime":"2025-09-29T18:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.478579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.478630 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.478644 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.478663 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.478675 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:00Z","lastTransitionTime":"2025-09-29T18:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.581440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.581729 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.581818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.581902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.581983 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:00Z","lastTransitionTime":"2025-09-29T18:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.684940 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.685199 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.685447 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.685674 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.685797 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:00Z","lastTransitionTime":"2025-09-29T18:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.788460 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.788523 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.788545 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.788575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.788600 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:00Z","lastTransitionTime":"2025-09-29T18:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.890949 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.891338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.891529 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.891910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.892250 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:00Z","lastTransitionTime":"2025-09-29T18:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.995295 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.996006 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.996045 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.996075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:00 crc kubenswrapper[4792]: I0929 18:58:00.996093 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:00Z","lastTransitionTime":"2025-09-29T18:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.014796 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.014962 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:01 crc kubenswrapper[4792]: E0929 18:58:01.015118 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.015198 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:01 crc kubenswrapper[4792]: E0929 18:58:01.015365 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:01 crc kubenswrapper[4792]: E0929 18:58:01.015704 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.014826 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:01 crc kubenswrapper[4792]: E0929 18:58:01.016042 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.098488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.098927 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.099114 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.099293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.099440 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:01Z","lastTransitionTime":"2025-09-29T18:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.202339 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.202377 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.202389 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.202410 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.202424 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:01Z","lastTransitionTime":"2025-09-29T18:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.305227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.305287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.305299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.305315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.305327 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:01Z","lastTransitionTime":"2025-09-29T18:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.408204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.408272 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.408292 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.408315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.408332 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:01Z","lastTransitionTime":"2025-09-29T18:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.510162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.510508 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.510652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.510803 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.510986 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:01Z","lastTransitionTime":"2025-09-29T18:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.614634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.614766 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.614783 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.614805 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.614822 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:01Z","lastTransitionTime":"2025-09-29T18:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.717611 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.718061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.718427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.718595 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.719048 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:01Z","lastTransitionTime":"2025-09-29T18:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.821519 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.821575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.821617 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.821653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.821676 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:01Z","lastTransitionTime":"2025-09-29T18:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.924844 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.924920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.924938 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.924963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:01 crc kubenswrapper[4792]: I0929 18:58:01.924980 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:01Z","lastTransitionTime":"2025-09-29T18:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.028044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.029019 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.029051 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.029082 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.029110 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:02Z","lastTransitionTime":"2025-09-29T18:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.132787 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.132899 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.132918 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.132943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.132960 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:02Z","lastTransitionTime":"2025-09-29T18:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.235458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.235521 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.235540 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.235566 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.235590 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:02Z","lastTransitionTime":"2025-09-29T18:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.338423 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.338487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.338504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.338530 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.338548 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:02Z","lastTransitionTime":"2025-09-29T18:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.441093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.441157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.441174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.441198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.441222 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:02Z","lastTransitionTime":"2025-09-29T18:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.543185 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.543223 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.543233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.543249 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.543260 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:02Z","lastTransitionTime":"2025-09-29T18:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.645608 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.645665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.645685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.645711 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.645733 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:02Z","lastTransitionTime":"2025-09-29T18:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.749391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.749464 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.749489 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.749519 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.749560 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:02Z","lastTransitionTime":"2025-09-29T18:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.853161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.853511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.853895 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.854370 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.854740 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:02Z","lastTransitionTime":"2025-09-29T18:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.958633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.959077 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.959243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.959370 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:02 crc kubenswrapper[4792]: I0929 18:58:02.959505 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:02Z","lastTransitionTime":"2025-09-29T18:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.015120 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:03 crc kubenswrapper[4792]: E0929 18:58:03.015296 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.015572 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:03 crc kubenswrapper[4792]: E0929 18:58:03.015666 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.015840 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:03 crc kubenswrapper[4792]: E0929 18:58:03.015965 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.016029 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:03 crc kubenswrapper[4792]: E0929 18:58:03.016107 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.062693 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.062766 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.062783 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.063319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.063384 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:03Z","lastTransitionTime":"2025-09-29T18:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.167042 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.167107 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.167128 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.167156 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.167177 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:03Z","lastTransitionTime":"2025-09-29T18:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.270276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.270342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.270362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.270384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.270459 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:03Z","lastTransitionTime":"2025-09-29T18:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.373650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.373747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.373765 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.373793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.373812 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:03Z","lastTransitionTime":"2025-09-29T18:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.476819 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.476904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.476922 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.476953 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.476972 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:03Z","lastTransitionTime":"2025-09-29T18:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.579716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.579975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.579988 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.580005 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.580017 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:03Z","lastTransitionTime":"2025-09-29T18:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.683075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.683478 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.683757 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.684101 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.684338 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:03Z","lastTransitionTime":"2025-09-29T18:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.787241 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.787287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.787306 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.787329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.787347 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:03Z","lastTransitionTime":"2025-09-29T18:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.890690 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.890739 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.890755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.890777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.890793 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:03Z","lastTransitionTime":"2025-09-29T18:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.993438 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.993769 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.993975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.994187 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:03 crc kubenswrapper[4792]: I0929 18:58:03.994464 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:03Z","lastTransitionTime":"2025-09-29T18:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.100289 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.100354 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.100374 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.100397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.100415 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:04Z","lastTransitionTime":"2025-09-29T18:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.827905 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.827950 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.827963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.827980 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.827992 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:04Z","lastTransitionTime":"2025-09-29T18:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.930273 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.930673 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.930828 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.931029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:58:04 crc kubenswrapper[4792]: I0929 18:58:04.931190 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:04Z","lastTransitionTime":"2025-09-29T18:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:58:05 crc kubenswrapper[4792]: I0929 18:58:05.014465 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:05 crc kubenswrapper[4792]: I0929 18:58:05.014611 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:05 crc kubenswrapper[4792]: E0929 18:58:05.014626 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:58:05 crc kubenswrapper[4792]: I0929 18:58:05.014680 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:05 crc kubenswrapper[4792]: E0929 18:58:05.014760 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:58:05 crc kubenswrapper[4792]: E0929 18:58:05.014994 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:58:05 crc kubenswrapper[4792]: I0929 18:58:05.015292 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:05 crc kubenswrapper[4792]: E0929 18:58:05.015437 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:05 crc kubenswrapper[4792]: I0929 18:58:05.034096 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:58:05 crc kubenswrapper[4792]: I0929 18:58:05.034123 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:58:05 crc kubenswrapper[4792]: I0929 18:58:05.034133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:58:05 crc kubenswrapper[4792]: I0929 18:58:05.034146 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:58:05 crc kubenswrapper[4792]: I0929 18:58:05.034173 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:05Z","lastTransitionTime":"2025-09-29T18:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.686090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.686432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.686525 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.686629 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.686743 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:06Z","lastTransitionTime":"2025-09-29T18:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.741926 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.742217 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.742338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.742419 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.742486 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T18:58:06Z","lastTransitionTime":"2025-09-29T18:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.785687 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"]
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.786072 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.788698 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.789000 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.789138 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.789509 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.868986 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a0cea961-de86-460c-a553-6b1c060f879f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.869035 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a0cea961-de86-460c-a553-6b1c060f879f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.869146 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a0cea961-de86-460c-a553-6b1c060f879f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.869300 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a0cea961-de86-460c-a553-6b1c060f879f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.869353 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a0cea961-de86-460c-a553-6b1c060f879f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.970547 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a0cea961-de86-460c-a553-6b1c060f879f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.970597 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a0cea961-de86-460c-a553-6b1c060f879f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.970631 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a0cea961-de86-460c-a553-6b1c060f879f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.970655 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a0cea961-de86-460c-a553-6b1c060f879f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.970693 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a0cea961-de86-460c-a553-6b1c060f879f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.971078 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a0cea961-de86-460c-a553-6b1c060f879f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.971135 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a0cea961-de86-460c-a553-6b1c060f879f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.971518 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a0cea961-de86-460c-a553-6b1c060f879f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.978089 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a0cea961-de86-460c-a553-6b1c060f879f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:06 crc kubenswrapper[4792]: I0929 18:58:06.987232 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a0cea961-de86-460c-a553-6b1c060f879f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8mt6k\" (UID: \"a0cea961-de86-460c-a553-6b1c060f879f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:07 crc kubenswrapper[4792]: I0929 18:58:07.014693 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:07 crc kubenswrapper[4792]: I0929 18:58:07.014742 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:07 crc kubenswrapper[4792]: I0929 18:58:07.014825 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:07 crc kubenswrapper[4792]: E0929 18:58:07.014818 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:07 crc kubenswrapper[4792]: E0929 18:58:07.014949 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:58:07 crc kubenswrapper[4792]: I0929 18:58:07.014709 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:07 crc kubenswrapper[4792]: E0929 18:58:07.015002 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:58:07 crc kubenswrapper[4792]: E0929 18:58:07.015045 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:58:07 crc kubenswrapper[4792]: I0929 18:58:07.100461 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k"
Sep 29 18:58:07 crc kubenswrapper[4792]: W0929 18:58:07.118065 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda0cea961_de86_460c_a553_6b1c060f879f.slice/crio-3889897e7424d6abf0ef779094432b6531c9c09df961dc1b972da3a0f245a3cb WatchSource:0}: Error finding container 3889897e7424d6abf0ef779094432b6531c9c09df961dc1b972da3a0f245a3cb: Status 404 returned error can't find the container with id 3889897e7424d6abf0ef779094432b6531c9c09df961dc1b972da3a0f245a3cb
Sep 29 18:58:07 crc kubenswrapper[4792]: I0929 18:58:07.576052 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k" event={"ID":"a0cea961-de86-460c-a553-6b1c060f879f","Type":"ContainerStarted","Data":"558d04d75173e037a5ac8b4d2a0b9cf277fd954fb6587fd42042c16f3977146a"}
Sep 29 18:58:07 crc kubenswrapper[4792]: I0929 18:58:07.576156 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k" event={"ID":"a0cea961-de86-460c-a553-6b1c060f879f","Type":"ContainerStarted","Data":"3889897e7424d6abf0ef779094432b6531c9c09df961dc1b972da3a0f245a3cb"}
Sep 29 18:58:09 crc kubenswrapper[4792]: I0929 18:58:09.015360 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:09 crc kubenswrapper[4792]: I0929 18:58:09.015410 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:09 crc kubenswrapper[4792]: I0929 18:58:09.015477 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:09 crc kubenswrapper[4792]: E0929 18:58:09.016316 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:58:09 crc kubenswrapper[4792]: I0929 18:58:09.016455 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:09 crc kubenswrapper[4792]: E0929 18:58:09.016995 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:09 crc kubenswrapper[4792]: E0929 18:58:09.017140 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:58:09 crc kubenswrapper[4792]: E0929 18:58:09.017225 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:58:09 crc kubenswrapper[4792]: I0929 18:58:09.017636 4792 scope.go:117] "RemoveContainer" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"
Sep 29 18:58:09 crc kubenswrapper[4792]: E0929 18:58:09.018124 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81"
Sep 29 18:58:11 crc kubenswrapper[4792]: I0929 18:58:11.015329 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:11 crc kubenswrapper[4792]: E0929 18:58:11.015689 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:58:11 crc kubenswrapper[4792]: I0929 18:58:11.015438 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:11 crc kubenswrapper[4792]: E0929 18:58:11.015766 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:11 crc kubenswrapper[4792]: I0929 18:58:11.015369 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:11 crc kubenswrapper[4792]: E0929 18:58:11.015816 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:58:11 crc kubenswrapper[4792]: I0929 18:58:11.016016 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:11 crc kubenswrapper[4792]: E0929 18:58:11.016069 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:58:11 crc kubenswrapper[4792]: I0929 18:58:11.422419 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:11 crc kubenswrapper[4792]: E0929 18:58:11.422572 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 18:58:11 crc kubenswrapper[4792]: E0929 18:58:11.422635 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs podName:fd292349-0e5a-4d80-b163-193aa43c98db nodeName:}" failed. No retries permitted until 2025-09-29 18:59:15.422618573 +0000 UTC m=+167.415925969 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs") pod "network-metrics-daemon-v5b2m" (UID: "fd292349-0e5a-4d80-b163-193aa43c98db") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 18:58:13 crc kubenswrapper[4792]: I0929 18:58:13.015269 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:13 crc kubenswrapper[4792]: I0929 18:58:13.015298 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:13 crc kubenswrapper[4792]: I0929 18:58:13.015303 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:13 crc kubenswrapper[4792]: I0929 18:58:13.015269 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:13 crc kubenswrapper[4792]: E0929 18:58:13.015456 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:13 crc kubenswrapper[4792]: E0929 18:58:13.015529 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:13 crc kubenswrapper[4792]: E0929 18:58:13.015644 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:13 crc kubenswrapper[4792]: E0929 18:58:13.015695 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:15 crc kubenswrapper[4792]: I0929 18:58:15.014934 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:15 crc kubenswrapper[4792]: I0929 18:58:15.014934 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:15 crc kubenswrapper[4792]: I0929 18:58:15.014999 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:15 crc kubenswrapper[4792]: I0929 18:58:15.015669 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:15 crc kubenswrapper[4792]: E0929 18:58:15.015795 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:15 crc kubenswrapper[4792]: E0929 18:58:15.015901 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:15 crc kubenswrapper[4792]: E0929 18:58:15.016034 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:15 crc kubenswrapper[4792]: E0929 18:58:15.016177 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:17 crc kubenswrapper[4792]: I0929 18:58:17.014909 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:17 crc kubenswrapper[4792]: I0929 18:58:17.014930 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:17 crc kubenswrapper[4792]: E0929 18:58:17.015030 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:17 crc kubenswrapper[4792]: E0929 18:58:17.015319 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:17 crc kubenswrapper[4792]: I0929 18:58:17.016457 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:17 crc kubenswrapper[4792]: I0929 18:58:17.016704 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:17 crc kubenswrapper[4792]: E0929 18:58:17.016912 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:17 crc kubenswrapper[4792]: E0929 18:58:17.016975 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:19 crc kubenswrapper[4792]: I0929 18:58:19.015070 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:19 crc kubenswrapper[4792]: E0929 18:58:19.015229 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:19 crc kubenswrapper[4792]: I0929 18:58:19.015841 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:19 crc kubenswrapper[4792]: I0929 18:58:19.016957 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:19 crc kubenswrapper[4792]: E0929 18:58:19.016630 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:19 crc kubenswrapper[4792]: I0929 18:58:19.017160 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:19 crc kubenswrapper[4792]: E0929 18:58:19.017592 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:19 crc kubenswrapper[4792]: E0929 18:58:19.018437 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:21 crc kubenswrapper[4792]: I0929 18:58:21.015038 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:21 crc kubenswrapper[4792]: I0929 18:58:21.015104 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:21 crc kubenswrapper[4792]: I0929 18:58:21.015105 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:21 crc kubenswrapper[4792]: I0929 18:58:21.015160 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:21 crc kubenswrapper[4792]: E0929 18:58:21.015315 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:21 crc kubenswrapper[4792]: E0929 18:58:21.015418 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:21 crc kubenswrapper[4792]: E0929 18:58:21.015511 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:21 crc kubenswrapper[4792]: E0929 18:58:21.015615 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:22 crc kubenswrapper[4792]: I0929 18:58:22.015041 4792 scope.go:117] "RemoveContainer" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386" Sep 29 18:58:22 crc kubenswrapper[4792]: E0929 18:58:22.015208 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-hr4cm_openshift-ovn-kubernetes(716c5fdd-0e02-4066-9210-93d805b6fe81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" Sep 29 18:58:23 crc kubenswrapper[4792]: I0929 18:58:23.014410 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:23 crc kubenswrapper[4792]: I0929 18:58:23.014454 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:23 crc kubenswrapper[4792]: I0929 18:58:23.014533 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:23 crc kubenswrapper[4792]: E0929 18:58:23.014602 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 18:58:23 crc kubenswrapper[4792]: I0929 18:58:23.014653 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:23 crc kubenswrapper[4792]: E0929 18:58:23.014830 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:23 crc kubenswrapper[4792]: E0929 18:58:23.015035 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:58:23 crc kubenswrapper[4792]: E0929 18:58:23.015148 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:58:25 crc kubenswrapper[4792]: I0929 18:58:25.015116 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:25 crc kubenswrapper[4792]: I0929 18:58:25.015225 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:25 crc kubenswrapper[4792]: E0929 18:58:25.015329 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:58:25 crc kubenswrapper[4792]: I0929 18:58:25.015357 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:25 crc kubenswrapper[4792]: I0929 18:58:25.015409 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:25 crc kubenswrapper[4792]: E0929 18:58:25.015456 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:58:25 crc kubenswrapper[4792]: E0929 18:58:25.015604 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:58:25 crc kubenswrapper[4792]: E0929 18:58:25.015817 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:27 crc kubenswrapper[4792]: I0929 18:58:27.015162 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:27 crc kubenswrapper[4792]: I0929 18:58:27.015162 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:27 crc kubenswrapper[4792]: E0929 18:58:27.016264 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:58:27 crc kubenswrapper[4792]: I0929 18:58:27.015402 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:27 crc kubenswrapper[4792]: E0929 18:58:27.016821 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:58:27 crc kubenswrapper[4792]: I0929 18:58:27.015190 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:27 crc kubenswrapper[4792]: E0929 18:58:27.016430 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:27 crc kubenswrapper[4792]: E0929 18:58:27.017392 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:58:28 crc kubenswrapper[4792]: I0929 18:58:28.642045 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5hwvp_100876d3-2539-47f1-91fa-0f91456ccac1/kube-multus/1.log"
Sep 29 18:58:28 crc kubenswrapper[4792]: I0929 18:58:28.642786 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5hwvp_100876d3-2539-47f1-91fa-0f91456ccac1/kube-multus/0.log"
Sep 29 18:58:28 crc kubenswrapper[4792]: I0929 18:58:28.642892 4792 generic.go:334] "Generic (PLEG): container finished" podID="100876d3-2539-47f1-91fa-0f91456ccac1" containerID="e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c" exitCode=1
Sep 29 18:58:28 crc kubenswrapper[4792]: I0929 18:58:28.642940 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5hwvp" event={"ID":"100876d3-2539-47f1-91fa-0f91456ccac1","Type":"ContainerDied","Data":"e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c"}
Sep 29 18:58:28 crc kubenswrapper[4792]: I0929 18:58:28.642991 4792 scope.go:117] "RemoveContainer" containerID="3ff4d11cfba0349ddf3f5a14c525716cfdff95c71698634e8feca328d6e41e2d"
Sep 29 18:58:28 crc kubenswrapper[4792]: I0929 18:58:28.643770 4792 scope.go:117] "RemoveContainer" containerID="e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c"
Sep 29 18:58:28 crc kubenswrapper[4792]: E0929 18:58:28.644121 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-5hwvp_openshift-multus(100876d3-2539-47f1-91fa-0f91456ccac1)\"" pod="openshift-multus/multus-5hwvp" podUID="100876d3-2539-47f1-91fa-0f91456ccac1"
Sep 29 18:58:28 crc kubenswrapper[4792]: I0929 18:58:28.670819 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8mt6k" podStartSLOduration=96.670791654 podStartE2EDuration="1m36.670791654s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:07.596258186 +0000 UTC m=+99.589565642" watchObservedRunningTime="2025-09-29 18:58:28.670791654 +0000 UTC m=+120.664099080"
Sep 29 18:58:29 crc kubenswrapper[4792]: I0929 18:58:29.015008 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:29 crc kubenswrapper[4792]: I0929 18:58:29.015136 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:29 crc kubenswrapper[4792]: I0929 18:58:29.015161 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:29 crc kubenswrapper[4792]: E0929 18:58:29.016117 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:29 crc kubenswrapper[4792]: I0929 18:58:29.016138 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:29 crc kubenswrapper[4792]: E0929 18:58:29.016300 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:29 crc kubenswrapper[4792]: E0929 18:58:29.016443 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:29 crc kubenswrapper[4792]: E0929 18:58:29.016605 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:29 crc kubenswrapper[4792]: E0929 18:58:29.039331 4792 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 29 18:58:29 crc kubenswrapper[4792]: E0929 18:58:29.100462 4792 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 18:58:29 crc kubenswrapper[4792]: I0929 18:58:29.648243 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5hwvp_100876d3-2539-47f1-91fa-0f91456ccac1/kube-multus/1.log" Sep 29 18:58:31 crc kubenswrapper[4792]: I0929 18:58:31.014395 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:31 crc kubenswrapper[4792]: I0929 18:58:31.014430 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:31 crc kubenswrapper[4792]: E0929 18:58:31.014890 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:31 crc kubenswrapper[4792]: E0929 18:58:31.014905 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:31 crc kubenswrapper[4792]: I0929 18:58:31.014529 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:31 crc kubenswrapper[4792]: E0929 18:58:31.015017 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:31 crc kubenswrapper[4792]: I0929 18:58:31.014474 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:31 crc kubenswrapper[4792]: E0929 18:58:31.015140 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:33 crc kubenswrapper[4792]: I0929 18:58:33.014779 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:33 crc kubenswrapper[4792]: E0929 18:58:33.014978 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:33 crc kubenswrapper[4792]: I0929 18:58:33.015126 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:33 crc kubenswrapper[4792]: I0929 18:58:33.015518 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:33 crc kubenswrapper[4792]: I0929 18:58:33.015539 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:33 crc kubenswrapper[4792]: E0929 18:58:33.015843 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:33 crc kubenswrapper[4792]: E0929 18:58:33.016055 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:33 crc kubenswrapper[4792]: E0929 18:58:33.016192 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:34 crc kubenswrapper[4792]: I0929 18:58:34.016366 4792 scope.go:117] "RemoveContainer" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386" Sep 29 18:58:34 crc kubenswrapper[4792]: E0929 18:58:34.102654 4792 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Sep 29 18:58:34 crc kubenswrapper[4792]: I0929 18:58:34.668581 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/3.log"
Sep 29 18:58:34 crc kubenswrapper[4792]: I0929 18:58:34.673335 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerStarted","Data":"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86"}
Sep 29 18:58:34 crc kubenswrapper[4792]: I0929 18:58:34.673978 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm"
Sep 29 18:58:34 crc kubenswrapper[4792]: I0929 18:58:34.708134 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podStartSLOduration=101.708115139 podStartE2EDuration="1m41.708115139s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:34.706579489 +0000 UTC m=+126.699886895" watchObservedRunningTime="2025-09-29 18:58:34.708115139 +0000 UTC m=+126.701422545"
Sep 29 18:58:34 crc kubenswrapper[4792]: I0929 18:58:34.958545 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-v5b2m"]
Sep 29 18:58:34 crc kubenswrapper[4792]: I0929 18:58:34.958656 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:34 crc kubenswrapper[4792]: E0929 18:58:34.958761 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:35 crc kubenswrapper[4792]: I0929 18:58:35.015054 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:35 crc kubenswrapper[4792]: E0929 18:58:35.015163 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:58:35 crc kubenswrapper[4792]: I0929 18:58:35.015412 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:35 crc kubenswrapper[4792]: E0929 18:58:35.015633 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:58:35 crc kubenswrapper[4792]: I0929 18:58:35.015784 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:35 crc kubenswrapper[4792]: E0929 18:58:35.016013 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:58:37 crc kubenswrapper[4792]: I0929 18:58:37.015335 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:37 crc kubenswrapper[4792]: E0929 18:58:37.015692 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:58:37 crc kubenswrapper[4792]: I0929 18:58:37.015568 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:37 crc kubenswrapper[4792]: I0929 18:58:37.015750 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:37 crc kubenswrapper[4792]: E0929 18:58:37.015783 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:58:37 crc kubenswrapper[4792]: I0929 18:58:37.015483 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:37 crc kubenswrapper[4792]: E0929 18:58:37.016010 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:58:37 crc kubenswrapper[4792]: E0929 18:58:37.016284 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:39 crc kubenswrapper[4792]: I0929 18:58:39.015371 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m"
Sep 29 18:58:39 crc kubenswrapper[4792]: I0929 18:58:39.015426 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:39 crc kubenswrapper[4792]: I0929 18:58:39.015366 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:39 crc kubenswrapper[4792]: I0929 18:58:39.015985 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:39 crc kubenswrapper[4792]: E0929 18:58:39.016701 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db"
Sep 29 18:58:39 crc kubenswrapper[4792]: E0929 18:58:39.016794 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 18:58:39 crc kubenswrapper[4792]: E0929 18:58:39.016890 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 18:58:39 crc kubenswrapper[4792]: E0929 18:58:39.016942 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 18:58:39 crc kubenswrapper[4792]: E0929 18:58:39.104308 4792 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Sep 29 18:58:41 crc kubenswrapper[4792]: I0929 18:58:41.016117 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:41 crc kubenswrapper[4792]: I0929 18:58:41.016346 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:41 crc kubenswrapper[4792]: E0929 18:58:41.016493 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:41 crc kubenswrapper[4792]: I0929 18:58:41.016579 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:41 crc kubenswrapper[4792]: E0929 18:58:41.016825 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:41 crc kubenswrapper[4792]: E0929 18:58:41.016955 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:41 crc kubenswrapper[4792]: I0929 18:58:41.017267 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:41 crc kubenswrapper[4792]: E0929 18:58:41.017508 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:42 crc kubenswrapper[4792]: I0929 18:58:42.015987 4792 scope.go:117] "RemoveContainer" containerID="e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c" Sep 29 18:58:42 crc kubenswrapper[4792]: I0929 18:58:42.708962 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5hwvp_100876d3-2539-47f1-91fa-0f91456ccac1/kube-multus/1.log" Sep 29 18:58:42 crc kubenswrapper[4792]: I0929 18:58:42.709031 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5hwvp" event={"ID":"100876d3-2539-47f1-91fa-0f91456ccac1","Type":"ContainerStarted","Data":"ce5b36817c4429539a535b70ada4c3b33d548c1cffdb995523a6276f42da0607"} Sep 29 18:58:43 crc kubenswrapper[4792]: I0929 18:58:43.015099 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:43 crc kubenswrapper[4792]: I0929 18:58:43.015196 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:43 crc kubenswrapper[4792]: I0929 18:58:43.015249 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:43 crc kubenswrapper[4792]: I0929 18:58:43.015331 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:43 crc kubenswrapper[4792]: E0929 18:58:43.015486 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v5b2m" podUID="fd292349-0e5a-4d80-b163-193aa43c98db" Sep 29 18:58:43 crc kubenswrapper[4792]: E0929 18:58:43.015642 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 18:58:43 crc kubenswrapper[4792]: E0929 18:58:43.015772 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 18:58:43 crc kubenswrapper[4792]: E0929 18:58:43.016013 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 18:58:45 crc kubenswrapper[4792]: I0929 18:58:45.015029 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:45 crc kubenswrapper[4792]: I0929 18:58:45.015034 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:58:45 crc kubenswrapper[4792]: I0929 18:58:45.015067 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 18:58:45 crc kubenswrapper[4792]: I0929 18:58:45.015223 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 18:58:45 crc kubenswrapper[4792]: I0929 18:58:45.020411 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 29 18:58:45 crc kubenswrapper[4792]: I0929 18:58:45.021824 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 29 18:58:45 crc kubenswrapper[4792]: I0929 18:58:45.021943 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 29 18:58:45 crc kubenswrapper[4792]: I0929 18:58:45.021971 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 29 18:58:45 crc kubenswrapper[4792]: I0929 18:58:45.022319 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 29 18:58:45 crc kubenswrapper[4792]: I0929 18:58:45.023063 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.744017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.839833 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7"] Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.840446 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.843821 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p9pds"] Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.844216 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.844222 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.845126 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.848402 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.848463 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.848541 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.848722 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.850889 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-4bgtx"] Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.851821 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.853239 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"] Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.853997 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.854412 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh"] Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.855216 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.867493 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9"] Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.868068 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.875280 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq"] Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.875980 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.881630 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.881807 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.882337 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.882405 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.882422 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.882419 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.882744 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.882961 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.882981 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.883112 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.883338 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.883363 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.883420 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.883569 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.883727 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.883766 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.883880 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.884839 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.887500 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.887676 4792 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.890018 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.899108 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.899471 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.899614 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.899963 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.902191 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.902503 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.902704 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.902810 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.907710 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.908288 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.909301 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.909742 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.917765 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.917969 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.918062 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.918221 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.918292 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Sep 
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.930073 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.930462 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.930591 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.931348 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.931509 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.932247 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.933985 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.934392 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.936322 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.936571 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.940615 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6r69d"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.940811 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.941277 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.942545 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.942683 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.941424 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943044 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.941451 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-audit-dir\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943222 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ada0d65e-bf4b-40ec-a03f-d0009526f8b6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-t9hk9\" (UID: \"ada0d65e-bf4b-40ec-a03f-d0009526f8b6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943259 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-auth-proxy-config\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943322 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943358 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d25q\" (UniqueName: \"kubernetes.io/projected/4b90ed41-b2cd-4525-b5e2-11513ee0c763-kube-api-access-5d25q\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943432 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-trusted-ca-bundle\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943500 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n95f\" (UniqueName: \"kubernetes.io/projected/ada0d65e-bf4b-40ec-a03f-d0009526f8b6-kube-api-access-5n95f\") pod \"openshift-apiserver-operator-796bbdcf4f-t9hk9\" (UID: \"ada0d65e-bf4b-40ec-a03f-d0009526f8b6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943534 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-config\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943579 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53064b9c-6401-4332-b64a-b8cbc84ae37c-serving-cert\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943613 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-machine-approver-tls\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943643 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsdl6\" (UniqueName: \"kubernetes.io/projected/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-kube-api-access-vsdl6\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943673 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943709 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-etcd-serving-ca\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943734 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b90ed41-b2cd-4525-b5e2-11513ee0c763-serving-cert\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943771 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4b90ed41-b2cd-4525-b5e2-11513ee0c763-audit-dir\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943813 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bktj\" (UniqueName: \"kubernetes.io/projected/53064b9c-6401-4332-b64a-b8cbc84ae37c-kube-api-access-4bktj\") pod
\"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943892 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4b90ed41-b2cd-4525-b5e2-11513ee0c763-node-pullsecrets\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943914 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-image-import-ca\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943929 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4b90ed41-b2cd-4525-b5e2-11513ee0c763-encryption-config\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943960 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4b90ed41-b2cd-4525-b5e2-11513ee0c763-etcd-client\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.943997 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-encryption-config\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944021 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-config\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944125 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-client-ca\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944159 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-audit-policies\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:47 crc 
kubenswrapper[4792]: I0929 18:58:47.944205 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-config\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944276 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-serving-cert\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944312 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-config\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944361 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-etcd-client\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944409 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzrsr\" (UniqueName: \"kubernetes.io/projected/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-kube-api-access-dzrsr\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944445 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a5115bb-23d8-4ff0-9c56-419450cd87fe-serving-cert\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944470 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944545 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-client-ca\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944680 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-audit\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944727 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgwqq\" (UniqueName: \"kubernetes.io/projected/3a5115bb-23d8-4ff0-9c56-419450cd87fe-kube-api-access-wgwqq\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.944760 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ada0d65e-bf4b-40ec-a03f-d0009526f8b6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-t9hk9\" (UID: \"ada0d65e-bf4b-40ec-a03f-d0009526f8b6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.947497 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.947679 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.947829 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.947976 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.948115 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.948421 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.951152 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.951419 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.952416 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-ldssp"] Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.952894 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z"] Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.953442 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.953717 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.953077 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-ldssp"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.953108 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.957205 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.958085 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.958476 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.959000 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.959063 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.959113 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.959113 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.963521 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.968474 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.970284 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.970473 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.970963 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.971239 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.971481 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.971844 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.972024 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.975222 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.975362 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.975515 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.975652 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.975806 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.975947 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.976093 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.976248 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.979318 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-dvq4s"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.980087 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.980588 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.980938 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.981070 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fnpzd"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.981129 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.981532 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.981673 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.981800 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.981966 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.982102 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.982215 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.982520 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.982838 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.982904 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.983003 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.983315 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.982233 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.982313 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.982353 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.982387 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.997289 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7"]
Sep 29 18:58:47 crc kubenswrapper[4792]: I0929 18:58:47.997827 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.010765 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.011031 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.011154 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.011338 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.031270 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.032254 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.032506 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.033308 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.033351 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.037294 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.037528 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.037790 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.037966 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.038326 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.038606 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.041197 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr"]
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.041939 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z"]
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.043105 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.043431 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.044548 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045128 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045636 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-etcd-client\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045661 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzrsr\" (UniqueName: \"kubernetes.io/projected/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-kube-api-access-dzrsr\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045685 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhjtz\" (UniqueName: \"kubernetes.io/projected/0d4ea471-e5cc-4571-9d2d-baab1747a457-kube-api-access-lhjtz\") pod \"migrator-59844c95c7-sqpsg\" (UID: \"0d4ea471-e5cc-4571-9d2d-baab1747a457\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045704 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1b36a633-3ac6-4670-aa21-b5e3f750484f-stats-auth\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045724 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a5115bb-23d8-4ff0-9c56-419450cd87fe-serving-cert\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045740 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045756 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6r69d\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") " pod="openshift-marketplace/marketplace-operator-79b997595-6r69d"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045773 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-audit\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045796 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-client-ca\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045813 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgwqq\" (UniqueName: \"kubernetes.io/projected/3a5115bb-23d8-4ff0-9c56-419450cd87fe-kube-api-access-wgwqq\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045828 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ada0d65e-bf4b-40ec-a03f-d0009526f8b6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-t9hk9\" (UID: \"ada0d65e-bf4b-40ec-a03f-d0009526f8b6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045866 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f2a0e30-8d34-4540-b7b5-99db8dc99d05-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x758z\" (UID: \"7f2a0e30-8d34-4540-b7b5-99db8dc99d05\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z"
Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045884 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\"
(UniqueName: \"kubernetes.io/host-path/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-audit-dir\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045893 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.045899 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ada0d65e-bf4b-40ec-a03f-d0009526f8b6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-t9hk9\" (UID: \"ada0d65e-bf4b-40ec-a03f-d0009526f8b6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046259 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-auth-proxy-config\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046283 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e3ff0f1d-5141-47e0-b414-db59edba635c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046300 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a09c3bef-aa53-4bb8-9cf1-b691a3276ed4-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d27m9\" (UID: \"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046315 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1b36a633-3ac6-4670-aa21-b5e3f750484f-default-certificate\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046346 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046373 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5d25q\" (UniqueName: \"kubernetes.io/projected/4b90ed41-b2cd-4525-b5e2-11513ee0c763-kube-api-access-5d25q\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046394 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2qbd\" (UniqueName: \"kubernetes.io/projected/56300584-499b-4d05-ada3-93dade9c9d9e-kube-api-access-k2qbd\") pod \"catalog-operator-68c6474976-p4lzh\" (UID: \"56300584-499b-4d05-ada3-93dade9c9d9e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046412 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-446wq\" (UniqueName: \"kubernetes.io/projected/e3ff0f1d-5141-47e0-b414-db59edba635c-kube-api-access-446wq\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046433 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/837e9a61-a894-44bf-981a-1bfae662e1e8-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046448 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8x92\" (UniqueName: \"kubernetes.io/projected/c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee-kube-api-access-w8x92\") pod \"olm-operator-6b444d44fb-w22fq\" (UID: \"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046465 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6b7b0964-7f17-4f2f-8a3f-f5e5171fec41-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bnclj\" (UID: \"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046467 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ada0d65e-bf4b-40ec-a03f-d0009526f8b6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-t9hk9\" (UID: \"ada0d65e-bf4b-40ec-a03f-d0009526f8b6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.046489 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1a5d2b1d-2a67-490d-8f55-45a7a0219457-tmpfs\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065141 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa7d4456-c058-4b02-bdf7-4ea41d52e777-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-h2hml\" (UID: \"aa7d4456-c058-4b02-bdf7-4ea41d52e777\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" Sep 29 
18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065200 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-trusted-ca-bundle\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065249 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-config\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065291 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n95f\" (UniqueName: \"kubernetes.io/projected/ada0d65e-bf4b-40ec-a03f-d0009526f8b6-kube-api-access-5n95f\") pod \"openshift-apiserver-operator-796bbdcf4f-t9hk9\" (UID: \"ada0d65e-bf4b-40ec-a03f-d0009526f8b6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065317 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53064b9c-6401-4332-b64a-b8cbc84ae37c-serving-cert\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065347 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwwrl\" (UniqueName: \"kubernetes.io/projected/ea34b349-a47a-4632-9fc6-b86e0d606e54-kube-api-access-cwwrl\") pod \"package-server-manager-789f6589d5-bfnfv\" (UID: \"ea34b349-a47a-4632-9fc6-b86e0d606e54\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065372 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs2wt\" (UniqueName: \"kubernetes.io/projected/1b36a633-3ac6-4670-aa21-b5e3f750484f-kube-api-access-rs2wt\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065402 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e3ff0f1d-5141-47e0-b414-db59edba635c-images\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065424 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa7d4456-c058-4b02-bdf7-4ea41d52e777-config\") pod \"kube-apiserver-operator-766d6c64bb-h2hml\" (UID: \"aa7d4456-c058-4b02-bdf7-4ea41d52e777\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065448 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-machine-approver-tls\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065473 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsdl6\" (UniqueName: \"kubernetes.io/projected/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-kube-api-access-vsdl6\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065499 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1a5d2b1d-2a67-490d-8f55-45a7a0219457-webhook-cert\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065534 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065556 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e3ff0f1d-5141-47e0-b414-db59edba635c-proxy-tls\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065575 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a09c3bef-aa53-4bb8-9cf1-b691a3276ed4-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d27m9\" (UID: \"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065594 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk9j7\" (UniqueName: \"kubernetes.io/projected/a09c3bef-aa53-4bb8-9cf1-b691a3276ed4-kube-api-access-mk9j7\") pod \"kube-storage-version-migrator-operator-b67b599dd-d27m9\" (UID: \"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065613 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b36a633-3ac6-4670-aa21-b5e3f750484f-metrics-certs\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065643 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-etcd-serving-ca\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065663 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b90ed41-b2cd-4525-b5e2-11513ee0c763-serving-cert\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065685 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/837e9a61-a894-44bf-981a-1bfae662e1e8-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065709 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b7b0964-7f17-4f2f-8a3f-f5e5171fec41-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bnclj\" (UID: \"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065741 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa7d4456-c058-4b02-bdf7-4ea41d52e777-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-h2hml\" (UID: \"aa7d4456-c058-4b02-bdf7-4ea41d52e777\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065765 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4b90ed41-b2cd-4525-b5e2-11513ee0c763-audit-dir\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065792 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bktj\" (UniqueName: \"kubernetes.io/projected/53064b9c-6401-4332-b64a-b8cbc84ae37c-kube-api-access-4bktj\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065813 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee-srv-cert\") pod \"olm-operator-6b444d44fb-w22fq\" (UID: \"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065839 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/4b90ed41-b2cd-4525-b5e2-11513ee0c763-node-pullsecrets\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065887 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-image-import-ca\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065910 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4b90ed41-b2cd-4525-b5e2-11513ee0c763-encryption-config\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065933 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/56300584-499b-4d05-ada3-93dade9c9d9e-srv-cert\") pod \"catalog-operator-68c6474976-p4lzh\" (UID: \"56300584-499b-4d05-ada3-93dade9c9d9e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065954 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ea34b349-a47a-4632-9fc6-b86e0d606e54-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-bfnfv\" (UID: \"ea34b349-a47a-4632-9fc6-b86e0d606e54\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065977 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1b36a633-3ac6-4670-aa21-b5e3f750484f-service-ca-bundle\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.065998 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4b90ed41-b2cd-4525-b5e2-11513ee0c763-etcd-client\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066018 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9zsl\" (UniqueName: \"kubernetes.io/projected/1a5d2b1d-2a67-490d-8f55-45a7a0219457-kube-api-access-s9zsl\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066040 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-encryption-config\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066060 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-config\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066083 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj5jh\" (UniqueName: \"kubernetes.io/projected/502b8b77-8c80-4cc0-8590-6fb9ce342289-kube-api-access-hj5jh\") pod \"marketplace-operator-79b997595-6r69d\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") " pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066105 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f2a0e30-8d34-4540-b7b5-99db8dc99d05-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x758z\" (UID: \"7f2a0e30-8d34-4540-b7b5-99db8dc99d05\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066139 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-client-ca\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066164 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/837e9a61-a894-44bf-981a-1bfae662e1e8-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066189 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-audit-policies\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066218 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd799\" (UniqueName: \"kubernetes.io/projected/837e9a61-a894-44bf-981a-1bfae662e1e8-kube-api-access-xd799\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066241 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ada0d65e-bf4b-40ec-a03f-d0009526f8b6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-t9hk9\" (UID: \"ada0d65e-bf4b-40ec-a03f-d0009526f8b6\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.048248 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.066255 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/56300584-499b-4d05-ada3-93dade9c9d9e-profile-collector-cert\") pod \"catalog-operator-68c6474976-p4lzh\" (UID: \"56300584-499b-4d05-ada3-93dade9c9d9e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.068318 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f2a0e30-8d34-4540-b7b5-99db8dc99d05-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x758z\" (UID: \"7f2a0e30-8d34-4540-b7b5-99db8dc99d05\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.068366 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-config\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.068392 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee-profile-collector-cert\") pod \"olm-operator-6b444d44fb-w22fq\" (UID: \"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.068422 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b7b0964-7f17-4f2f-8a3f-f5e5171fec41-config\") pod \"kube-controller-manager-operator-78b949d7b-bnclj\" (UID: \"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.068458 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-serving-cert\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.068488 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-config\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.068512 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6r69d\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") " pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.068537 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1a5d2b1d-2a67-490d-8f55-45a7a0219457-apiservice-cert\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.049351 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-audit\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.067519 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-trusted-ca-bundle\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.050463 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-auth-proxy-config\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.053532 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.049988 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-client-ca\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.067920 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-config\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.076465 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lwr4w"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.091618 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.091724 4792 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-sp272"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.092326 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.076547 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4b90ed41-b2cd-4525-b5e2-11513ee0c763-audit-dir\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.080281 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.093129 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.053617 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-audit-dir\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.083535 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-etcd-serving-ca\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.084125 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-client-ca\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.084968 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-config\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.086545 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-config\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.086630 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-config\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 
18:58:48.054310 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.094727 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b90ed41-b2cd-4525-b5e2-11513ee0c763-serving-cert\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.095104 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.095536 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-audit-policies\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.095929 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.079710 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-etcd-client\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.080569 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4b90ed41-b2cd-4525-b5e2-11513ee0c763-node-pullsecrets\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.081578 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/4b90ed41-b2cd-4525-b5e2-11513ee0c763-image-import-ca\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.099627 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.100914 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4b90ed41-b2cd-4525-b5e2-11513ee0c763-encryption-config\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.106829 
4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a5115bb-23d8-4ff0-9c56-419450cd87fe-serving-cert\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.108403 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-machine-approver-tls\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.115292 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.116658 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-encryption-config\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.117036 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-serving-cert\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.117397 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4b90ed41-b2cd-4525-b5e2-11513ee0c763-etcd-client\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.119944 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bp8h2"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.120516 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-bpj7h"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.120879 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.120986 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.121331 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.121443 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.122734 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p9pds"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.124759 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-ln92w"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.125192 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.126568 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53064b9c-6401-4332-b64a-b8cbc84ae37c-serving-cert\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.132466 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.134111 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t4h92"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.134710 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.136123 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-7phmq"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.136469 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7phmq" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.139138 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-7pt7w"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.139472 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-w569t"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.139795 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.139984 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.150092 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.151165 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4pj4c"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.151830 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.154195 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vw7wd"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.155612 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-4bgtx"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.155744 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.156901 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-f6x5z"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.169959 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/837e9a61-a894-44bf-981a-1bfae662e1e8-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170002 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59997681-59ad-46c1-b61e-5206099176d6-trusted-ca\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170034 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd799\" (UniqueName: \"kubernetes.io/projected/837e9a61-a894-44bf-981a-1bfae662e1e8-kube-api-access-xd799\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170059 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/56300584-499b-4d05-ada3-93dade9c9d9e-profile-collector-cert\") pod \"catalog-operator-68c6474976-p4lzh\" (UID: \"56300584-499b-4d05-ada3-93dade9c9d9e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170079 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f2a0e30-8d34-4540-b7b5-99db8dc99d05-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x758z\" (UID: \"7f2a0e30-8d34-4540-b7b5-99db8dc99d05\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170097 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee-profile-collector-cert\") pod \"olm-operator-6b444d44fb-w22fq\" (UID: \"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170119 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b7b0964-7f17-4f2f-8a3f-f5e5171fec41-config\") pod \"kube-controller-manager-operator-78b949d7b-bnclj\" (UID: \"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170142 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6r69d\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") " pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170163 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1a5d2b1d-2a67-490d-8f55-45a7a0219457-apiservice-cert\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170208 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhjtz\" (UniqueName: \"kubernetes.io/projected/0d4ea471-e5cc-4571-9d2d-baab1747a457-kube-api-access-lhjtz\") pod \"migrator-59844c95c7-sqpsg\" (UID: \"0d4ea471-e5cc-4571-9d2d-baab1747a457\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170229 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1b36a633-3ac6-4670-aa21-b5e3f750484f-stats-auth\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170252 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59997681-59ad-46c1-b61e-5206099176d6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170279 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6r69d\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") " pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170309 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f2a0e30-8d34-4540-b7b5-99db8dc99d05-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x758z\" (UID: \"7f2a0e30-8d34-4540-b7b5-99db8dc99d05\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170332 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-l8kcf\" (UniqueName: \"kubernetes.io/projected/59997681-59ad-46c1-b61e-5206099176d6-kube-api-access-l8kcf\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170365 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e3ff0f1d-5141-47e0-b414-db59edba635c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170388 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a09c3bef-aa53-4bb8-9cf1-b691a3276ed4-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d27m9\" (UID: \"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170411 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1b36a633-3ac6-4670-aa21-b5e3f750484f-default-certificate\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170439 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-446wq\" (UniqueName: \"kubernetes.io/projected/e3ff0f1d-5141-47e0-b414-db59edba635c-kube-api-access-446wq\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170462 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2qbd\" (UniqueName: \"kubernetes.io/projected/56300584-499b-4d05-ada3-93dade9c9d9e-kube-api-access-k2qbd\") pod \"catalog-operator-68c6474976-p4lzh\" (UID: \"56300584-499b-4d05-ada3-93dade9c9d9e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170491 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/837e9a61-a894-44bf-981a-1bfae662e1e8-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170511 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8x92\" (UniqueName: \"kubernetes.io/projected/c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee-kube-api-access-w8x92\") pod \"olm-operator-6b444d44fb-w22fq\" (UID: \"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170530 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/6b7b0964-7f17-4f2f-8a3f-f5e5171fec41-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bnclj\" (UID: \"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170562 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1a5d2b1d-2a67-490d-8f55-45a7a0219457-tmpfs\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170582 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa7d4456-c058-4b02-bdf7-4ea41d52e777-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-h2hml\" (UID: \"aa7d4456-c058-4b02-bdf7-4ea41d52e777\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170622 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwwrl\" (UniqueName: \"kubernetes.io/projected/ea34b349-a47a-4632-9fc6-b86e0d606e54-kube-api-access-cwwrl\") pod \"package-server-manager-789f6589d5-bfnfv\" (UID: \"ea34b349-a47a-4632-9fc6-b86e0d606e54\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170643 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs2wt\" (UniqueName: \"kubernetes.io/projected/1b36a633-3ac6-4670-aa21-b5e3f750484f-kube-api-access-rs2wt\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170663 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa7d4456-c058-4b02-bdf7-4ea41d52e777-config\") pod \"kube-apiserver-operator-766d6c64bb-h2hml\" (UID: \"aa7d4456-c058-4b02-bdf7-4ea41d52e777\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170679 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e3ff0f1d-5141-47e0-b414-db59edba635c-images\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170708 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1a5d2b1d-2a67-490d-8f55-45a7a0219457-webhook-cert\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170728 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e3ff0f1d-5141-47e0-b414-db59edba635c-proxy-tls\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: 
\"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170749 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a09c3bef-aa53-4bb8-9cf1-b691a3276ed4-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d27m9\" (UID: \"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170772 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk9j7\" (UniqueName: \"kubernetes.io/projected/a09c3bef-aa53-4bb8-9cf1-b691a3276ed4-kube-api-access-mk9j7\") pod \"kube-storage-version-migrator-operator-b67b599dd-d27m9\" (UID: \"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170867 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b36a633-3ac6-4670-aa21-b5e3f750484f-metrics-certs\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170896 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/837e9a61-a894-44bf-981a-1bfae662e1e8-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170938 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b7b0964-7f17-4f2f-8a3f-f5e5171fec41-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bnclj\" (UID: \"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.170973 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa7d4456-c058-4b02-bdf7-4ea41d52e777-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-h2hml\" (UID: \"aa7d4456-c058-4b02-bdf7-4ea41d52e777\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.171015 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/59997681-59ad-46c1-b61e-5206099176d6-metrics-tls\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.171049 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee-srv-cert\") pod \"olm-operator-6b444d44fb-w22fq\" (UID: 
\"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.171089 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/56300584-499b-4d05-ada3-93dade9c9d9e-srv-cert\") pod \"catalog-operator-68c6474976-p4lzh\" (UID: \"56300584-499b-4d05-ada3-93dade9c9d9e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.171113 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9zsl\" (UniqueName: \"kubernetes.io/projected/1a5d2b1d-2a67-490d-8f55-45a7a0219457-kube-api-access-s9zsl\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.171134 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ea34b349-a47a-4632-9fc6-b86e0d606e54-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-bfnfv\" (UID: \"ea34b349-a47a-4632-9fc6-b86e0d606e54\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.171191 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1b36a633-3ac6-4670-aa21-b5e3f750484f-service-ca-bundle\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.171214 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj5jh\" (UniqueName: \"kubernetes.io/projected/502b8b77-8c80-4cc0-8590-6fb9ce342289-kube-api-access-hj5jh\") pod \"marketplace-operator-79b997595-6r69d\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") " pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.171250 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f2a0e30-8d34-4540-b7b5-99db8dc99d05-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x758z\" (UID: \"7f2a0e30-8d34-4540-b7b5-99db8dc99d05\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.173223 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.173364 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-f6x5z" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.176206 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f2a0e30-8d34-4540-b7b5-99db8dc99d05-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x758z\" (UID: \"7f2a0e30-8d34-4540-b7b5-99db8dc99d05\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.177410 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b7b0964-7f17-4f2f-8a3f-f5e5171fec41-config\") pod \"kube-controller-manager-operator-78b949d7b-bnclj\" (UID: \"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.181777 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1b36a633-3ac6-4670-aa21-b5e3f750484f-stats-auth\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.186499 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-kmxdh"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.189458 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a09c3bef-aa53-4bb8-9cf1-b691a3276ed4-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d27m9\" (UID: \"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.191413 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6r69d"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.191618 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-kmxdh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.191560 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/837e9a61-a894-44bf-981a-1bfae662e1e8-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.193771 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1a5d2b1d-2a67-490d-8f55-45a7a0219457-apiservice-cert\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.198714 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6r69d\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") " pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.198996 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b36a633-3ac6-4670-aa21-b5e3f750484f-metrics-certs\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.199290 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-dvq4s"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.199800 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6r69d\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") " pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.200010 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e3ff0f1d-5141-47e0-b414-db59edba635c-proxy-tls\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.200572 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/56300584-499b-4d05-ada3-93dade9c9d9e-profile-collector-cert\") pod \"catalog-operator-68c6474976-p4lzh\" (UID: \"56300584-499b-4d05-ada3-93dade9c9d9e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.200634 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.201634 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.201612 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.201881 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f2a0e30-8d34-4540-b7b5-99db8dc99d05-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x758z\" (UID: \"7f2a0e30-8d34-4540-b7b5-99db8dc99d05\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.204801 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e3ff0f1d-5141-47e0-b414-db59edba635c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.205301 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e3ff0f1d-5141-47e0-b414-db59edba635c-images\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.206289 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee-profile-collector-cert\") pod \"olm-operator-6b444d44fb-w22fq\" (UID: \"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.206349 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.207583 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1b36a633-3ac6-4670-aa21-b5e3f750484f-service-ca-bundle\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.211713 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1a5d2b1d-2a67-490d-8f55-45a7a0219457-tmpfs\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.213225 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1b36a633-3ac6-4670-aa21-b5e3f750484f-default-certificate\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.213666 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/1a5d2b1d-2a67-490d-8f55-45a7a0219457-webhook-cert\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.214313 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/837e9a61-a894-44bf-981a-1bfae662e1e8-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.214355 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/56300584-499b-4d05-ada3-93dade9c9d9e-srv-cert\") pod \"catalog-operator-68c6474976-p4lzh\" (UID: \"56300584-499b-4d05-ada3-93dade9c9d9e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.214605 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a09c3bef-aa53-4bb8-9cf1-b691a3276ed4-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d27m9\" (UID: \"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.215695 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa7d4456-c058-4b02-bdf7-4ea41d52e777-config\") pod \"kube-apiserver-operator-766d6c64bb-h2hml\" (UID: \"aa7d4456-c058-4b02-bdf7-4ea41d52e777\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.217103 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b7b0964-7f17-4f2f-8a3f-f5e5171fec41-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bnclj\" (UID: \"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.218025 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.218914 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee-srv-cert\") pod \"olm-operator-6b444d44fb-w22fq\" (UID: \"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.220794 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.222951 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ea34b349-a47a-4632-9fc6-b86e0d606e54-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-bfnfv\" 
(UID: \"ea34b349-a47a-4632-9fc6-b86e0d606e54\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.225915 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa7d4456-c058-4b02-bdf7-4ea41d52e777-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-h2hml\" (UID: \"aa7d4456-c058-4b02-bdf7-4ea41d52e777\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.227717 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-n4krv"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.229345 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.229480 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.229822 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.232967 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.233005 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.238954 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.243906 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.252570 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-sp272"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.254027 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.257228 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.257919 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzrsr\" (UniqueName: \"kubernetes.io/projected/2b5b8e64-d53b-407e-a10f-d4fed5afd70c-kube-api-access-dzrsr\") pod \"apiserver-7bbb656c7d-vmw2h\" (UID: \"2b5b8e64-d53b-407e-a10f-d4fed5afd70c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.258512 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-kmxdh"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.260040 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t4h92"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.261804 4792 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lwr4w"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.263947 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.266240 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vw7wd"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.267944 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.269057 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-f6x5z"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.270127 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fnpzd"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.271110 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bp8h2"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.272480 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.272625 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59997681-59ad-46c1-b61e-5206099176d6-trusted-ca\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.272687 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59997681-59ad-46c1-b61e-5206099176d6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.272724 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8kcf\" (UniqueName: \"kubernetes.io/projected/59997681-59ad-46c1-b61e-5206099176d6-kube-api-access-l8kcf\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.272882 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/59997681-59ad-46c1-b61e-5206099176d6-metrics-tls\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.273713 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.274766 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/59997681-59ad-46c1-b61e-5206099176d6-trusted-ca\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.275151 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.276293 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-bpj7h"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.277453 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-xkp78"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.279194 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-ln92w"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.279380 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.279682 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-w569t"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.279840 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgwqq\" (UniqueName: \"kubernetes.io/projected/3a5115bb-23d8-4ff0-9c56-419450cd87fe-kube-api-access-wgwqq\") pod \"controller-manager-879f6c89f-p9pds\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.280751 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7phmq"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.281548 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/59997681-59ad-46c1-b61e-5206099176d6-metrics-tls\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.282543 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4pj4c"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.283990 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-7pt7w"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.285082 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-xkp78"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.298246 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5d25q\" (UniqueName: \"kubernetes.io/projected/4b90ed41-b2cd-4525-b5e2-11513ee0c763-kube-api-access-5d25q\") pod \"apiserver-76f77b778f-4bgtx\" (UID: \"4b90ed41-b2cd-4525-b5e2-11513ee0c763\") " pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.317782 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n95f\" (UniqueName: \"kubernetes.io/projected/ada0d65e-bf4b-40ec-a03f-d0009526f8b6-kube-api-access-5n95f\") pod 
\"openshift-apiserver-operator-796bbdcf4f-t9hk9\" (UID: \"ada0d65e-bf4b-40ec-a03f-d0009526f8b6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.339017 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsdl6\" (UniqueName: \"kubernetes.io/projected/fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1-kube-api-access-vsdl6\") pod \"machine-approver-56656f9798-5j6xh\" (UID: \"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.362901 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.364356 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bktj\" (UniqueName: \"kubernetes.io/projected/53064b9c-6401-4332-b64a-b8cbc84ae37c-kube-api-access-4bktj\") pod \"route-controller-manager-6576b87f9c-l44c7\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.382832 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.405351 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.423014 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.442419 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.459206 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.466861 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.477529 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.484125 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.488346 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.495988 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.503223 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.519777 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.537730 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" Sep 29 18:58:48 crc kubenswrapper[4792]: W0929 18:58:48.541922 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfcd90e7a_6ae8_4065_bcab_5d3f8fffc5a1.slice/crio-b53ed9bfb72c0e4fe0b6749647cde29c0de4b04b663417ccb6f4bcb415a39ee6 WatchSource:0}: Error finding container b53ed9bfb72c0e4fe0b6749647cde29c0de4b04b663417ccb6f4bcb415a39ee6: Status 404 returned error can't find the container with id b53ed9bfb72c0e4fe0b6749647cde29c0de4b04b663417ccb6f4bcb415a39ee6 Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.543675 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.563313 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.583652 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.603696 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.622741 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.643354 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.670534 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.683143 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.702942 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.704615 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7"] Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.724902 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.738059 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" event={"ID":"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1","Type":"ContainerStarted","Data":"b53ed9bfb72c0e4fe0b6749647cde29c0de4b04b663417ccb6f4bcb415a39ee6"} Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.742343 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" 
event={"ID":"53064b9c-6401-4332-b64a-b8cbc84ae37c","Type":"ContainerStarted","Data":"07730ba3d41660f554a2e8df96306b54d02a57b919e76f6fd01652dcc057e533"} Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.742529 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.769139 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.783346 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.804639 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.823182 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.844249 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.863096 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.882230 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.892629 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9"] Sep 29 18:58:48 crc kubenswrapper[4792]: W0929 18:58:48.898833 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podada0d65e_bf4b_40ec_a03f_d0009526f8b6.slice/crio-5a4e1b682d7880cb218f81624b07c95e6cb04ccb464b32528fdcc8723bffe0c6 WatchSource:0}: Error finding container 5a4e1b682d7880cb218f81624b07c95e6cb04ccb464b32528fdcc8723bffe0c6: Status 404 returned error can't find the container with id 5a4e1b682d7880cb218f81624b07c95e6cb04ccb464b32528fdcc8723bffe0c6 Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.903274 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.922980 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.942095 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.963703 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.985115 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 29 18:58:48 crc kubenswrapper[4792]: I0929 18:58:48.990570 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"] Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 
18:58:49.002034 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.010099 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p9pds"] Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.010838 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-4bgtx"] Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.022706 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.042469 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: W0929 18:58:49.047954 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a5115bb_23d8_4ff0_9c56_419450cd87fe.slice/crio-01e0c786da2dd100626d7cf7cee6af30949c03fbc18fac6427b3e3bff5fb630d WatchSource:0}: Error finding container 01e0c786da2dd100626d7cf7cee6af30949c03fbc18fac6427b3e3bff5fb630d: Status 404 returned error can't find the container with id 01e0c786da2dd100626d7cf7cee6af30949c03fbc18fac6427b3e3bff5fb630d Sep 29 18:58:49 crc kubenswrapper[4792]: W0929 18:58:49.048979 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b90ed41_b2cd_4525_b5e2_11513ee0c763.slice/crio-f78fce17f83c64f5e81f5b8ff830cb40704b41deddaa9120375c9eac0b81a90c WatchSource:0}: Error finding container f78fce17f83c64f5e81f5b8ff830cb40704b41deddaa9120375c9eac0b81a90c: Status 404 returned error can't find the container with id f78fce17f83c64f5e81f5b8ff830cb40704b41deddaa9120375c9eac0b81a90c Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.062645 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.083295 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.103941 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.121965 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.141471 4792 request.go:700] Waited for 1.001295024s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd-operator/secrets?fieldSelector=metadata.name%3Detcd-operator-dockercfg-r9srn&limit=500&resourceVersion=0 Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.143498 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.162810 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.183734 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Sep 29 18:58:49 crc 
kubenswrapper[4792]: I0929 18:58:49.203132 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.222613 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.243114 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.263074 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.282694 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.303161 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.323261 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.343146 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.362787 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.387642 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.408686 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.422150 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.442609 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.465575 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.483644 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.503876 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.522990 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.552591 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.562563 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.583151 4792 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.602614 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.623031 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.643510 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.662751 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.682723 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.703835 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.722943 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.742335 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.746918 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" event={"ID":"3a5115bb-23d8-4ff0-9c56-419450cd87fe","Type":"ContainerStarted","Data":"e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.746969 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" event={"ID":"3a5115bb-23d8-4ff0-9c56-419450cd87fe","Type":"ContainerStarted","Data":"01e0c786da2dd100626d7cf7cee6af30949c03fbc18fac6427b3e3bff5fb630d"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.747977 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.749912 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" event={"ID":"53064b9c-6401-4332-b64a-b8cbc84ae37c","Type":"ContainerStarted","Data":"e31089b68510000b294ad308d14550309bb7bb3fb7f4e4fd50a23ce0abaaa354"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.750483 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.750558 4792 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-p9pds container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.750586 4792 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" podUID="3a5115bb-23d8-4ff0-9c56-419450cd87fe" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.754051 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" event={"ID":"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1","Type":"ContainerStarted","Data":"8e049c02bd96d88f29ae38e013a61ea87adc9e162ebb7d433811273272e73f47"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.754080 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" event={"ID":"fcd90e7a-6ae8-4065-bcab-5d3f8fffc5a1","Type":"ContainerStarted","Data":"7964aeb6ae0417f8ef52313bbd6f9ac412213993e5c756ee321108562646f56d"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.756308 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" event={"ID":"ada0d65e-bf4b-40ec-a03f-d0009526f8b6","Type":"ContainerStarted","Data":"48d67310dce1fadecb282ec87840dc13ee2ce2bf8bb06ada68e99f128dc52ddd"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.756368 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" event={"ID":"ada0d65e-bf4b-40ec-a03f-d0009526f8b6","Type":"ContainerStarted","Data":"5a4e1b682d7880cb218f81624b07c95e6cb04ccb464b32528fdcc8723bffe0c6"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.758172 4792 generic.go:334] "Generic (PLEG): container finished" podID="4b90ed41-b2cd-4525-b5e2-11513ee0c763" containerID="c6bb3b2afda30fc05f6eac1646845d73bc965780aa0b4a401526ceebcc197362" exitCode=0 Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.758248 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" event={"ID":"4b90ed41-b2cd-4525-b5e2-11513ee0c763","Type":"ContainerDied","Data":"c6bb3b2afda30fc05f6eac1646845d73bc965780aa0b4a401526ceebcc197362"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.758271 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" event={"ID":"4b90ed41-b2cd-4525-b5e2-11513ee0c763","Type":"ContainerStarted","Data":"f78fce17f83c64f5e81f5b8ff830cb40704b41deddaa9120375c9eac0b81a90c"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.761463 4792 generic.go:334] "Generic (PLEG): container finished" podID="2b5b8e64-d53b-407e-a10f-d4fed5afd70c" containerID="c8b79d38104ceb09644db47b2c436240d2e08a803802fac68f4ad90767c1ca2f" exitCode=0 Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.761496 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" event={"ID":"2b5b8e64-d53b-407e-a10f-d4fed5afd70c","Type":"ContainerDied","Data":"c8b79d38104ceb09644db47b2c436240d2e08a803802fac68f4ad90767c1ca2f"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.761516 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" event={"ID":"2b5b8e64-d53b-407e-a10f-d4fed5afd70c","Type":"ContainerStarted","Data":"c2a7927f6de05e119131c41bf4caebc5d55ce6db32f8340830a2c84b77379c02"} Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.762595 4792 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.802244 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.810863 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd799\" (UniqueName: \"kubernetes.io/projected/837e9a61-a894-44bf-981a-1bfae662e1e8-kube-api-access-xd799\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.822816 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.842551 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.868640 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.898485 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f2a0e30-8d34-4540-b7b5-99db8dc99d05-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x758z\" (UID: \"7f2a0e30-8d34-4540-b7b5-99db8dc99d05\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.919522 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhjtz\" (UniqueName: \"kubernetes.io/projected/0d4ea471-e5cc-4571-9d2d-baab1747a457-kube-api-access-lhjtz\") pod \"migrator-59844c95c7-sqpsg\" (UID: \"0d4ea471-e5cc-4571-9d2d-baab1747a457\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.936150 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk9j7\" (UniqueName: \"kubernetes.io/projected/a09c3bef-aa53-4bb8-9cf1-b691a3276ed4-kube-api-access-mk9j7\") pod \"kube-storage-version-migrator-operator-b67b599dd-d27m9\" (UID: \"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.949419 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.964088 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 29 18:58:49 crc kubenswrapper[4792]: I0929 18:58:49.985181 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.002312 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.021825 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-446wq\" (UniqueName: \"kubernetes.io/projected/e3ff0f1d-5141-47e0-b414-db59edba635c-kube-api-access-446wq\") pod \"machine-config-operator-74547568cd-b89qf\" (UID: \"e3ff0f1d-5141-47e0-b414-db59edba635c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.027517 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.041721 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.052657 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.058757 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2qbd\" (UniqueName: \"kubernetes.io/projected/56300584-499b-4d05-ada3-93dade9c9d9e-kube-api-access-k2qbd\") pod \"catalog-operator-68c6474976-p4lzh\" (UID: \"56300584-499b-4d05-ada3-93dade9c9d9e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.061182 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/837e9a61-a894-44bf-981a-1bfae662e1e8-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nv8fj\" (UID: \"837e9a61-a894-44bf-981a-1bfae662e1e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.078642 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8x92\" (UniqueName: \"kubernetes.io/projected/c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee-kube-api-access-w8x92\") pod \"olm-operator-6b444d44fb-w22fq\" (UID: \"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.110586 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj5jh\" (UniqueName: \"kubernetes.io/projected/502b8b77-8c80-4cc0-8590-6fb9ce342289-kube-api-access-hj5jh\") pod \"marketplace-operator-79b997595-6r69d\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") " pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.124291 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6b7b0964-7f17-4f2f-8a3f-f5e5171fec41-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bnclj\" (UID: \"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.143698 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-s9zsl\" (UniqueName: \"kubernetes.io/projected/1a5d2b1d-2a67-490d-8f55-45a7a0219457-kube-api-access-s9zsl\") pod \"packageserver-d55dfcdfc-vt754\" (UID: \"1a5d2b1d-2a67-490d-8f55-45a7a0219457\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.155828 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.160802 4792 request.go:700] Waited for 1.946285764s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/serviceaccounts/router/token Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.166925 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa7d4456-c058-4b02-bdf7-4ea41d52e777-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-h2hml\" (UID: \"aa7d4456-c058-4b02-bdf7-4ea41d52e777\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.181662 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs2wt\" (UniqueName: \"kubernetes.io/projected/1b36a633-3ac6-4670-aa21-b5e3f750484f-kube-api-access-rs2wt\") pod \"router-default-5444994796-ldssp\" (UID: \"1b36a633-3ac6-4670-aa21-b5e3f750484f\") " pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.208177 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.215196 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.224321 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.239550 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.243272 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwwrl\" (UniqueName: \"kubernetes.io/projected/ea34b349-a47a-4632-9fc6-b86e0d606e54-kube-api-access-cwwrl\") pod \"package-server-manager-789f6589d5-bfnfv\" (UID: \"ea34b349-a47a-4632-9fc6-b86e0d606e54\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.257434 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.260127 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.327887 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.335601 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59997681-59ad-46c1-b61e-5206099176d6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.335796 4792 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.340153 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8kcf\" (UniqueName: \"kubernetes.io/projected/59997681-59ad-46c1-b61e-5206099176d6-kube-api-access-l8kcf\") pod \"ingress-operator-5b745b69d9-ljgr4\" (UID: \"59997681-59ad-46c1-b61e-5206099176d6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.349298 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.353415 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.353485 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.367771 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.376685 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.377115 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.411929 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.412679 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a5fc467b-0e4c-4f20-9729-56906756b33d-config-volume\") pod \"collect-profiles-29319525-kwqwh\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.413335 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cf82\" (UniqueName: \"kubernetes.io/projected/358b1904-d49e-40a5-b5a7-624709da4e55-kube-api-access-6cf82\") pod \"cluster-samples-operator-665b6dd947-8wffr\" (UID: \"358b1904-d49e-40a5-b5a7-624709da4e55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.414606 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909-serving-cert\") pod \"openshift-config-operator-7777fb866f-ktzhf\" (UID: \"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.414710 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5a99395b-1e1f-425f-b934-8ad850a2e8a5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-sp272\" (UID: \"5a99395b-1e1f-425f-b934-8ad850a2e8a5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.414796 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/391aee9c-3245-49c5-a150-9d95b16b3c61-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-dvq4s\" (UID: \"391aee9c-3245-49c5-a150-9d95b16b3c61\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.414820 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thdfx\" (UniqueName: \"kubernetes.io/projected/a5fc467b-0e4c-4f20-9729-56906756b33d-kube-api-access-thdfx\") pod \"collect-profiles-29319525-kwqwh\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.414840 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39e18b3b-156d-46e5-9ace-51ee36c17614-config\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.414892 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5a99395b-1e1f-425f-b934-8ad850a2e8a5-proxy-tls\") pod 
\"machine-config-controller-84d6567774-sp272\" (UID: \"5a99395b-1e1f-425f-b934-8ad850a2e8a5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.414915 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3ccc1f5-4945-4a14-8f84-363683bbd575-installation-pull-secrets\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.418894 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420451 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64637bf9-60f4-4394-986b-b2fa4d7fb780-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-r647z\" (UID: \"64637bf9-60f4-4394-986b-b2fa4d7fb780\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420528 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtzp9\" (UniqueName: \"kubernetes.io/projected/39e18b3b-156d-46e5-9ace-51ee36c17614-kube-api-access-wtzp9\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420591 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-bound-sa-token\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420613 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw2tk\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-kube-api-access-rw2tk\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420629 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64637bf9-60f4-4394-986b-b2fa4d7fb780-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-r647z\" (UID: \"64637bf9-60f4-4394-986b-b2fa4d7fb780\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420663 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-trusted-ca\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 
18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420683 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/358b1904-d49e-40a5-b5a7-624709da4e55-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8wffr\" (UID: \"358b1904-d49e-40a5-b5a7-624709da4e55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420716 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zfsz\" (UniqueName: \"kubernetes.io/projected/391aee9c-3245-49c5-a150-9d95b16b3c61-kube-api-access-8zfsz\") pod \"multus-admission-controller-857f4d67dd-dvq4s\" (UID: \"391aee9c-3245-49c5-a150-9d95b16b3c61\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420749 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a5fc467b-0e4c-4f20-9729-56906756b33d-secret-volume\") pod \"collect-profiles-29319525-kwqwh\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420805 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjlcz\" (UniqueName: \"kubernetes.io/projected/0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909-kube-api-access-pjlcz\") pod \"openshift-config-operator-7777fb866f-ktzhf\" (UID: \"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420841 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/39e18b3b-156d-46e5-9ace-51ee36c17614-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420897 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909-available-featuregates\") pod \"openshift-config-operator-7777fb866f-ktzhf\" (UID: \"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.420997 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3ccc1f5-4945-4a14-8f84-363683bbd575-ca-trust-extracted\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.421048 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: 
\"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.421098 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95rbn\" (UniqueName: \"kubernetes.io/projected/64637bf9-60f4-4394-986b-b2fa4d7fb780-kube-api-access-95rbn\") pod \"openshift-controller-manager-operator-756b6f6bc6-r647z\" (UID: \"64637bf9-60f4-4394-986b-b2fa4d7fb780\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.421126 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-tls\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.421145 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-certificates\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.421161 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5gnc\" (UniqueName: \"kubernetes.io/projected/5a99395b-1e1f-425f-b934-8ad850a2e8a5-kube-api-access-f5gnc\") pod \"machine-config-controller-84d6567774-sp272\" (UID: \"5a99395b-1e1f-425f-b934-8ad850a2e8a5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" Sep 29 18:58:50 crc kubenswrapper[4792]: E0929 18:58:50.426284 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:50.926264062 +0000 UTC m=+142.919571458 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.426833 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/39e18b3b-156d-46e5-9ace-51ee36c17614-images\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534490 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534763 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-certificates\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534797 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tcpz\" (UniqueName: \"kubernetes.io/projected/4d8c1f74-cc55-4f70-afea-f177b99ec47c-kube-api-access-6tcpz\") pod \"dns-operator-744455d44c-vw7wd\" (UID: \"4d8c1f74-cc55-4f70-afea-f177b99ec47c\") " pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534817 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7467cb63-a3bc-42a9-88ad-f61eb5475110-cert\") pod \"ingress-canary-f6x5z\" (UID: \"7467cb63-a3bc-42a9-88ad-f61eb5475110\") " pod="openshift-ingress-canary/ingress-canary-f6x5z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534837 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534876 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/39e18b3b-156d-46e5-9ace-51ee36c17614-images\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534893 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534913 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a5fc467b-0e4c-4f20-9729-56906756b33d-config-volume\") pod \"collect-profiles-29319525-kwqwh\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534936 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534955 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-plugins-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.534984 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/493abf55-6e87-4745-a90b-5564a4e42dab-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535001 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-mountpoint-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535020 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-serving-cert\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535036 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/485f1f0b-73b1-4681-9edb-683eda716bde-serving-cert\") pod \"service-ca-operator-777779d784-ln92w\" (UID: \"485f1f0b-73b1-4681-9edb-683eda716bde\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535054 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535074 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33d62c67-3fa8-48ca-b984-820c3061a4ae-config-volume\") pod \"dns-default-kmxdh\" (UID: \"33d62c67-3fa8-48ca-b984-820c3061a4ae\") " pod="openshift-dns/dns-default-kmxdh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535099 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnknq\" (UniqueName: \"kubernetes.io/projected/493abf55-6e87-4745-a90b-5564a4e42dab-kube-api-access-hnknq\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535120 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-service-ca\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535139 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5a99395b-1e1f-425f-b934-8ad850a2e8a5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-sp272\" (UID: \"5a99395b-1e1f-425f-b934-8ad850a2e8a5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535158 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrv4c\" (UniqueName: \"kubernetes.io/projected/440c70b7-786c-454b-9910-11923a2cf456-kube-api-access-nrv4c\") pod \"service-ca-9c57cc56f-t4h92\" (UID: \"440c70b7-786c-454b-9910-11923a2cf456\") " pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535194 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/391aee9c-3245-49c5-a150-9d95b16b3c61-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-dvq4s\" (UID: \"391aee9c-3245-49c5-a150-9d95b16b3c61\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535211 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39e18b3b-156d-46e5-9ace-51ee36c17614-config\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535229 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09b3ca22-55ce-4f12-9f35-308b6020819f-etcd-client\") pod \"etcd-operator-b45778765-w569t\" (UID: 
\"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535250 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7ac58a67-2de7-48ec-9a6c-f7cf37538bdd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-857r4\" (UID: \"7ac58a67-2de7-48ec-9a6c-f7cf37538bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535271 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-config\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535291 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535309 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3ccc1f5-4945-4a14-8f84-363683bbd575-installation-pull-secrets\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535328 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535348 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtzp9\" (UniqueName: \"kubernetes.io/projected/39e18b3b-156d-46e5-9ace-51ee36c17614-kube-api-access-wtzp9\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535368 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535388 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/485f1f0b-73b1-4681-9edb-683eda716bde-config\") pod 
\"service-ca-operator-777779d784-ln92w\" (UID: \"485f1f0b-73b1-4681-9edb-683eda716bde\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535420 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64637bf9-60f4-4394-986b-b2fa4d7fb780-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-r647z\" (UID: \"64637bf9-60f4-4394-986b-b2fa4d7fb780\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535440 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-policies\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535457 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-trusted-ca\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535493 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zfsz\" (UniqueName: \"kubernetes.io/projected/391aee9c-3245-49c5-a150-9d95b16b3c61-kube-api-access-8zfsz\") pod \"multus-admission-controller-857f4d67dd-dvq4s\" (UID: \"391aee9c-3245-49c5-a150-9d95b16b3c61\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535514 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09b3ca22-55ce-4f12-9f35-308b6020819f-serving-cert\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535544 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/493abf55-6e87-4745-a90b-5564a4e42dab-config\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535563 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/33d62c67-3fa8-48ca-b984-820c3061a4ae-metrics-tls\") pod \"dns-default-kmxdh\" (UID: \"33d62c67-3fa8-48ca-b984-820c3061a4ae\") " pod="openshift-dns/dns-default-kmxdh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535580 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09b3ca22-55ce-4f12-9f35-308b6020819f-etcd-service-ca\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" 
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535601 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjlcz\" (UniqueName: \"kubernetes.io/projected/0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909-kube-api-access-pjlcz\") pod \"openshift-config-operator-7777fb866f-ktzhf\" (UID: \"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535620 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lxcf\" (UniqueName: \"kubernetes.io/projected/09b3ca22-55ce-4f12-9f35-308b6020819f-kube-api-access-5lxcf\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535643 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-trusted-ca-bundle\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535660 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535677 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hdqg\" (UniqueName: \"kubernetes.io/projected/577a8444-c6e3-4aae-922e-12c7cb3b0b11-kube-api-access-4hdqg\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535697 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909-available-featuregates\") pod \"openshift-config-operator-7777fb866f-ktzhf\" (UID: \"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535714 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09b3ca22-55ce-4f12-9f35-308b6020819f-etcd-ca\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535731 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/440c70b7-786c-454b-9910-11923a2cf456-signing-cabundle\") pod \"service-ca-9c57cc56f-t4h92\" (UID: \"440c70b7-786c-454b-9910-11923a2cf456\") " pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535768 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j66wn\" (UniqueName: \"kubernetes.io/projected/d813bd44-0760-4757-95da-beced796238f-kube-api-access-j66wn\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535787 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3ccc1f5-4945-4a14-8f84-363683bbd575-ca-trust-extracted\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535815 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/493abf55-6e87-4745-a90b-5564a4e42dab-service-ca-bundle\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535841 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-tls\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535875 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5gnc\" (UniqueName: \"kubernetes.io/projected/5a99395b-1e1f-425f-b934-8ad850a2e8a5-kube-api-access-f5gnc\") pod \"machine-config-controller-84d6567774-sp272\" (UID: \"5a99395b-1e1f-425f-b934-8ad850a2e8a5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535902 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mskst\" (UniqueName: \"kubernetes.io/projected/7ac58a67-2de7-48ec-9a6c-f7cf37538bdd-kube-api-access-mskst\") pod \"control-plane-machine-set-operator-78cbb6b69f-857r4\" (UID: \"7ac58a67-2de7-48ec-9a6c-f7cf37538bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535920 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c465eadf-19c1-417c-bf6e-8f4eb6d26338-serving-cert\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535936 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcnfb\" (UniqueName: \"kubernetes.io/projected/7467cb63-a3bc-42a9-88ad-f61eb5475110-kube-api-access-rcnfb\") pod \"ingress-canary-f6x5z\" (UID: \"7467cb63-a3bc-42a9-88ad-f61eb5475110\") " pod="openshift-ingress-canary/ingress-canary-f6x5z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535951 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-cv576\" (UniqueName: \"kubernetes.io/projected/33d62c67-3fa8-48ca-b984-820c3061a4ae-kube-api-access-cv576\") pod \"dns-default-kmxdh\" (UID: \"33d62c67-3fa8-48ca-b984-820c3061a4ae\") " pod="openshift-dns/dns-default-kmxdh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535970 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cf82\" (UniqueName: \"kubernetes.io/projected/358b1904-d49e-40a5-b5a7-624709da4e55-kube-api-access-6cf82\") pod \"cluster-samples-operator-665b6dd947-8wffr\" (UID: \"358b1904-d49e-40a5-b5a7-624709da4e55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.535994 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536012 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/769115af-182e-4e4b-b39e-411bf0d27747-certs\") pod \"machine-config-server-n4krv\" (UID: \"769115af-182e-4e4b-b39e-411bf0d27747\") " pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536027 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrxvb\" (UniqueName: \"kubernetes.io/projected/769115af-182e-4e4b-b39e-411bf0d27747-kube-api-access-vrxvb\") pod \"machine-config-server-n4krv\" (UID: \"769115af-182e-4e4b-b39e-411bf0d27747\") " pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536042 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wk22\" (UniqueName: \"kubernetes.io/projected/662a9f05-f1a7-4d9d-8b42-daadfeddb122-kube-api-access-7wk22\") pod \"downloads-7954f5f757-7phmq\" (UID: \"662a9f05-f1a7-4d9d-8b42-daadfeddb122\") " pod="openshift-console/downloads-7954f5f757-7phmq" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536061 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909-serving-cert\") pod \"openshift-config-operator-7777fb866f-ktzhf\" (UID: \"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536079 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thdfx\" (UniqueName: \"kubernetes.io/projected/a5fc467b-0e4c-4f20-9729-56906756b33d-kube-api-access-thdfx\") pod \"collect-profiles-29319525-kwqwh\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536094 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-oauth-serving-cert\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536120 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5a99395b-1e1f-425f-b934-8ad850a2e8a5-proxy-tls\") pod \"machine-config-controller-84d6567774-sp272\" (UID: \"5a99395b-1e1f-425f-b934-8ad850a2e8a5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536136 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/440c70b7-786c-454b-9910-11923a2cf456-signing-key\") pod \"service-ca-9c57cc56f-t4h92\" (UID: \"440c70b7-786c-454b-9910-11923a2cf456\") " pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536162 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536181 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536201 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64637bf9-60f4-4394-986b-b2fa4d7fb780-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-r647z\" (UID: \"64637bf9-60f4-4394-986b-b2fa4d7fb780\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536219 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mttjt\" (UniqueName: \"kubernetes.io/projected/56d0b0e8-6440-4f28-9d05-ad7be713a117-kube-api-access-mttjt\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536235 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-bound-sa-token\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536252 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw2tk\" (UniqueName: 
\"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-kube-api-access-rw2tk\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536268 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-socket-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536287 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/358b1904-d49e-40a5-b5a7-624709da4e55-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8wffr\" (UID: \"358b1904-d49e-40a5-b5a7-624709da4e55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536302 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-dir\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536317 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-oauth-config\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536334 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4d8c1f74-cc55-4f70-afea-f177b99ec47c-metrics-tls\") pod \"dns-operator-744455d44c-vw7wd\" (UID: \"4d8c1f74-cc55-4f70-afea-f177b99ec47c\") " pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536350 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09b3ca22-55ce-4f12-9f35-308b6020819f-config\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536364 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/493abf55-6e87-4745-a90b-5564a4e42dab-serving-cert\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536380 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a5fc467b-0e4c-4f20-9729-56906756b33d-secret-volume\") pod \"collect-profiles-29319525-kwqwh\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536398 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/769115af-182e-4e4b-b39e-411bf0d27747-node-bootstrap-token\") pod \"machine-config-server-n4krv\" (UID: \"769115af-182e-4e4b-b39e-411bf0d27747\") " pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536416 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/39e18b3b-156d-46e5-9ace-51ee36c17614-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536433 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl9j7\" (UniqueName: \"kubernetes.io/projected/485f1f0b-73b1-4681-9edb-683eda716bde-kube-api-access-rl9j7\") pod \"service-ca-operator-777779d784-ln92w\" (UID: \"485f1f0b-73b1-4681-9edb-683eda716bde\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536458 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-registration-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536475 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmhgp\" (UniqueName: \"kubernetes.io/projected/c465eadf-19c1-417c-bf6e-8f4eb6d26338-kube-api-access-bmhgp\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536499 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c465eadf-19c1-417c-bf6e-8f4eb6d26338-config\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536525 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c465eadf-19c1-417c-bf6e-8f4eb6d26338-trusted-ca\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.536541 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-csi-data-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc 
kubenswrapper[4792]: I0929 18:58:50.536567 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95rbn\" (UniqueName: \"kubernetes.io/projected/64637bf9-60f4-4394-986b-b2fa4d7fb780-kube-api-access-95rbn\") pod \"openshift-controller-manager-operator-756b6f6bc6-r647z\" (UID: \"64637bf9-60f4-4394-986b-b2fa4d7fb780\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z"
Sep 29 18:58:50 crc kubenswrapper[4792]: E0929 18:58:50.536843 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:51.036819405 +0000 UTC m=+143.030126801 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.538612 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-certificates\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.539673 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/39e18b3b-156d-46e5-9ace-51ee36c17614-images\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.539731 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a5fc467b-0e4c-4f20-9729-56906756b33d-config-volume\") pod \"collect-profiles-29319525-kwqwh\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.545064 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5a99395b-1e1f-425f-b934-8ad850a2e8a5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-sp272\" (UID: \"5a99395b-1e1f-425f-b934-8ad850a2e8a5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.545101 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-trusted-ca\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.545166 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName:
\"kubernetes.io/configmap/64637bf9-60f4-4394-986b-b2fa4d7fb780-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-r647z\" (UID: \"64637bf9-60f4-4394-986b-b2fa4d7fb780\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.555396 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/391aee9c-3245-49c5-a150-9d95b16b3c61-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-dvq4s\" (UID: \"391aee9c-3245-49c5-a150-9d95b16b3c61\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.555613 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3ccc1f5-4945-4a14-8f84-363683bbd575-ca-trust-extracted\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.560409 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.564567 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39e18b3b-156d-46e5-9ace-51ee36c17614-config\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.565418 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf"] Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.565444 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64637bf9-60f4-4394-986b-b2fa4d7fb780-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-r647z\" (UID: \"64637bf9-60f4-4394-986b-b2fa4d7fb780\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.566096 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/358b1904-d49e-40a5-b5a7-624709da4e55-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8wffr\" (UID: \"358b1904-d49e-40a5-b5a7-624709da4e55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.566943 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909-serving-cert\") pod \"openshift-config-operator-7777fb866f-ktzhf\" (UID: \"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.569244 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909-available-featuregates\") pod \"openshift-config-operator-7777fb866f-ktzhf\" (UID: 
\"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.588405 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95rbn\" (UniqueName: \"kubernetes.io/projected/64637bf9-60f4-4394-986b-b2fa4d7fb780-kube-api-access-95rbn\") pod \"openshift-controller-manager-operator-756b6f6bc6-r647z\" (UID: \"64637bf9-60f4-4394-986b-b2fa4d7fb780\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.589714 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5a99395b-1e1f-425f-b934-8ad850a2e8a5-proxy-tls\") pod \"machine-config-controller-84d6567774-sp272\" (UID: \"5a99395b-1e1f-425f-b934-8ad850a2e8a5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.589809 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a5fc467b-0e4c-4f20-9729-56906756b33d-secret-volume\") pod \"collect-profiles-29319525-kwqwh\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.590141 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3ccc1f5-4945-4a14-8f84-363683bbd575-installation-pull-secrets\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.593190 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/39e18b3b-156d-46e5-9ace-51ee36c17614-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.603508 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtzp9\" (UniqueName: \"kubernetes.io/projected/39e18b3b-156d-46e5-9ace-51ee36c17614-kube-api-access-wtzp9\") pod \"machine-api-operator-5694c8668f-lwr4w\" (UID: \"39e18b3b-156d-46e5-9ace-51ee36c17614\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.607659 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-tls\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.645735 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09b3ca22-55ce-4f12-9f35-308b6020819f-etcd-ca\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.645796 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/440c70b7-786c-454b-9910-11923a2cf456-signing-cabundle\") pod \"service-ca-9c57cc56f-t4h92\" (UID: \"440c70b7-786c-454b-9910-11923a2cf456\") " pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.645831 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j66wn\" (UniqueName: \"kubernetes.io/projected/d813bd44-0760-4757-95da-beced796238f-kube-api-access-j66wn\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.645935 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.645990 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/493abf55-6e87-4745-a90b-5564a4e42dab-service-ca-bundle\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646065 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mskst\" (UniqueName: \"kubernetes.io/projected/7ac58a67-2de7-48ec-9a6c-f7cf37538bdd-kube-api-access-mskst\") pod \"control-plane-machine-set-operator-78cbb6b69f-857r4\" (UID: \"7ac58a67-2de7-48ec-9a6c-f7cf37538bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646091 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c465eadf-19c1-417c-bf6e-8f4eb6d26338-serving-cert\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646118 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcnfb\" (UniqueName: \"kubernetes.io/projected/7467cb63-a3bc-42a9-88ad-f61eb5475110-kube-api-access-rcnfb\") pod \"ingress-canary-f6x5z\" (UID: \"7467cb63-a3bc-42a9-88ad-f61eb5475110\") " pod="openshift-ingress-canary/ingress-canary-f6x5z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646143 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv576\" (UniqueName: \"kubernetes.io/projected/33d62c67-3fa8-48ca-b984-820c3061a4ae-kube-api-access-cv576\") pod \"dns-default-kmxdh\" (UID: \"33d62c67-3fa8-48ca-b984-820c3061a4ae\") " pod="openshift-dns/dns-default-kmxdh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646196 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646223 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wk22\" (UniqueName: \"kubernetes.io/projected/662a9f05-f1a7-4d9d-8b42-daadfeddb122-kube-api-access-7wk22\") pod \"downloads-7954f5f757-7phmq\" (UID: \"662a9f05-f1a7-4d9d-8b42-daadfeddb122\") " pod="openshift-console/downloads-7954f5f757-7phmq" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646253 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/769115af-182e-4e4b-b39e-411bf0d27747-certs\") pod \"machine-config-server-n4krv\" (UID: \"769115af-182e-4e4b-b39e-411bf0d27747\") " pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646279 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrxvb\" (UniqueName: \"kubernetes.io/projected/769115af-182e-4e4b-b39e-411bf0d27747-kube-api-access-vrxvb\") pod \"machine-config-server-n4krv\" (UID: \"769115af-182e-4e4b-b39e-411bf0d27747\") " pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646312 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-oauth-serving-cert\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646394 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/440c70b7-786c-454b-9910-11923a2cf456-signing-key\") pod \"service-ca-9c57cc56f-t4h92\" (UID: \"440c70b7-786c-454b-9910-11923a2cf456\") " pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646427 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646454 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646485 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mttjt\" (UniqueName: \"kubernetes.io/projected/56d0b0e8-6440-4f28-9d05-ad7be713a117-kube-api-access-mttjt\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646528 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-socket-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646555 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-dir\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646581 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4d8c1f74-cc55-4f70-afea-f177b99ec47c-metrics-tls\") pod \"dns-operator-744455d44c-vw7wd\" (UID: \"4d8c1f74-cc55-4f70-afea-f177b99ec47c\") " pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646602 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-oauth-config\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646625 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09b3ca22-55ce-4f12-9f35-308b6020819f-config\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646648 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/493abf55-6e87-4745-a90b-5564a4e42dab-serving-cert\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646673 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/769115af-182e-4e4b-b39e-411bf0d27747-node-bootstrap-token\") pod \"machine-config-server-n4krv\" (UID: \"769115af-182e-4e4b-b39e-411bf0d27747\") " pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646702 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl9j7\" (UniqueName: \"kubernetes.io/projected/485f1f0b-73b1-4681-9edb-683eda716bde-kube-api-access-rl9j7\") pod \"service-ca-operator-777779d784-ln92w\" (UID: \"485f1f0b-73b1-4681-9edb-683eda716bde\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646735 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: 
\"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-registration-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646773 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmhgp\" (UniqueName: \"kubernetes.io/projected/c465eadf-19c1-417c-bf6e-8f4eb6d26338-kube-api-access-bmhgp\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646802 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c465eadf-19c1-417c-bf6e-8f4eb6d26338-config\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646831 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c465eadf-19c1-417c-bf6e-8f4eb6d26338-trusted-ca\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646881 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-csi-data-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646914 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tcpz\" (UniqueName: \"kubernetes.io/projected/4d8c1f74-cc55-4f70-afea-f177b99ec47c-kube-api-access-6tcpz\") pod \"dns-operator-744455d44c-vw7wd\" (UID: \"4d8c1f74-cc55-4f70-afea-f177b99ec47c\") " pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.646940 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.649325 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-csi-data-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.649643 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-socket-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.650275 4792 
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.651308 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7467cb63-a3bc-42a9-88ad-f61eb5475110-cert\") pod \"ingress-canary-f6x5z\" (UID: \"7467cb63-a3bc-42a9-88ad-f61eb5475110\") " pod="openshift-ingress-canary/ingress-canary-f6x5z"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.651345 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-registration-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.651369 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.651402 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-plugins-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.651442 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.651467 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/493abf55-6e87-4745-a90b-5564a4e42dab-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.651491 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-mountpoint-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.652681 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c465eadf-19c1-417c-bf6e-8f4eb6d26338-config\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.652934 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09b3ca22-55ce-4f12-9f35-308b6020819f-config\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.653874 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-dir\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.654060 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.655094 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-oauth-serving-cert\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.655136 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-serving-cert\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.671544 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cf82\" (UniqueName: \"kubernetes.io/projected/358b1904-d49e-40a5-b5a7-624709da4e55-kube-api-access-6cf82\") pod \"cluster-samples-operator-665b6dd947-8wffr\" (UID: \"358b1904-d49e-40a5-b5a7-624709da4e55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.679160 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/485f1f0b-73b1-4681-9edb-683eda716bde-serving-cert\") pod \"service-ca-operator-777779d784-ln92w\" (UID: \"485f1f0b-73b1-4681-9edb-683eda716bde\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.679234 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.679286 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnknq\" (UniqueName: \"kubernetes.io/projected/493abf55-6e87-4745-a90b-5564a4e42dab-kube-api-access-hnknq\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2"
(UniqueName: \"kubernetes.io/projected/493abf55-6e87-4745-a90b-5564a4e42dab-kube-api-access-hnknq\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.679319 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33d62c67-3fa8-48ca-b984-820c3061a4ae-config-volume\") pod \"dns-default-kmxdh\" (UID: \"33d62c67-3fa8-48ca-b984-820c3061a4ae\") " pod="openshift-dns/dns-default-kmxdh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.679366 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-service-ca\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.679604 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/440c70b7-786c-454b-9910-11923a2cf456-signing-key\") pod \"service-ca-9c57cc56f-t4h92\" (UID: \"440c70b7-786c-454b-9910-11923a2cf456\") " pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.680296 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5gnc\" (UniqueName: \"kubernetes.io/projected/5a99395b-1e1f-425f-b934-8ad850a2e8a5-kube-api-access-f5gnc\") pod \"machine-config-controller-84d6567774-sp272\" (UID: \"5a99395b-1e1f-425f-b934-8ad850a2e8a5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.682933 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-plugins-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.683162 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/440c70b7-786c-454b-9910-11923a2cf456-signing-cabundle\") pod \"service-ca-9c57cc56f-t4h92\" (UID: \"440c70b7-786c-454b-9910-11923a2cf456\") " pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:50 crc kubenswrapper[4792]: E0929 18:58:50.684149 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:51.184128499 +0000 UTC m=+143.177435895 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.685992 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d813bd44-0760-4757-95da-beced796238f-mountpoint-dir\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.686584 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/493abf55-6e87-4745-a90b-5564a4e42dab-service-ca-bundle\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.688814 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/493abf55-6e87-4745-a90b-5564a4e42dab-serving-cert\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.689017 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrv4c\" (UniqueName: \"kubernetes.io/projected/440c70b7-786c-454b-9910-11923a2cf456-kube-api-access-nrv4c\") pod \"service-ca-9c57cc56f-t4h92\" (UID: \"440c70b7-786c-454b-9910-11923a2cf456\") " pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.689331 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09b3ca22-55ce-4f12-9f35-308b6020819f-etcd-client\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.689919 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4d8c1f74-cc55-4f70-afea-f177b99ec47c-metrics-tls\") pod \"dns-operator-744455d44c-vw7wd\" (UID: \"4d8c1f74-cc55-4f70-afea-f177b99ec47c\") " pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691092 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7ac58a67-2de7-48ec-9a6c-f7cf37538bdd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-857r4\" (UID: \"7ac58a67-2de7-48ec-9a6c-f7cf37538bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691173 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" 
(UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-config\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691228 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691270 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691315 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/485f1f0b-73b1-4681-9edb-683eda716bde-config\") pod \"service-ca-operator-777779d784-ln92w\" (UID: \"485f1f0b-73b1-4681-9edb-683eda716bde\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691376 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691414 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-policies\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691494 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09b3ca22-55ce-4f12-9f35-308b6020819f-serving-cert\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691543 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/493abf55-6e87-4745-a90b-5564a4e42dab-config\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691589 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09b3ca22-55ce-4f12-9f35-308b6020819f-etcd-service-ca\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691636 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/33d62c67-3fa8-48ca-b984-820c3061a4ae-metrics-tls\") pod \"dns-default-kmxdh\" (UID: \"33d62c67-3fa8-48ca-b984-820c3061a4ae\") " pod="openshift-dns/dns-default-kmxdh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691696 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lxcf\" (UniqueName: \"kubernetes.io/projected/09b3ca22-55ce-4f12-9f35-308b6020819f-kube-api-access-5lxcf\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691726 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-trusted-ca-bundle\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691752 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.691799 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hdqg\" (UniqueName: \"kubernetes.io/projected/577a8444-c6e3-4aae-922e-12c7cb3b0b11-kube-api-access-4hdqg\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.695580 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.697261 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/769115af-182e-4e4b-b39e-411bf0d27747-node-bootstrap-token\") pod \"machine-config-server-n4krv\" (UID: \"769115af-182e-4e4b-b39e-411bf0d27747\") " pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.697783 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c465eadf-19c1-417c-bf6e-8f4eb6d26338-serving-cert\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.698669 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-oauth-config\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.698999 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/485f1f0b-73b1-4681-9edb-683eda716bde-config\") pod \"service-ca-operator-777779d784-ln92w\" (UID: \"485f1f0b-73b1-4681-9edb-683eda716bde\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.700879 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.701256 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-serving-cert\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.701526 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/769115af-182e-4e4b-b39e-411bf0d27747-certs\") pod \"machine-config-server-n4krv\" (UID: \"769115af-182e-4e4b-b39e-411bf0d27747\") " pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.701798 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.702141 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-policies\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.703117 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.703628 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09b3ca22-55ce-4f12-9f35-308b6020819f-etcd-service-ca\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: 
I0929 18:58:50.704592 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.709392 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.715030 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.716554 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09b3ca22-55ce-4f12-9f35-308b6020819f-etcd-client\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.716557 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7ac58a67-2de7-48ec-9a6c-f7cf37538bdd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-857r4\" (UID: \"7ac58a67-2de7-48ec-9a6c-f7cf37538bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.716960 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zfsz\" (UniqueName: \"kubernetes.io/projected/391aee9c-3245-49c5-a150-9d95b16b3c61-kube-api-access-8zfsz\") pod \"multus-admission-controller-857f4d67dd-dvq4s\" (UID: \"391aee9c-3245-49c5-a150-9d95b16b3c61\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.720259 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.720513 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.720866 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.721083 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/485f1f0b-73b1-4681-9edb-683eda716bde-serving-cert\") pod \"service-ca-operator-777779d784-ln92w\" (UID: \"485f1f0b-73b1-4681-9edb-683eda716bde\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.722415 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33d62c67-3fa8-48ca-b984-820c3061a4ae-config-volume\") pod \"dns-default-kmxdh\" (UID: \"33d62c67-3fa8-48ca-b984-820c3061a4ae\") " pod="openshift-dns/dns-default-kmxdh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.722660 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-service-ca\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.723002 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/493abf55-6e87-4745-a90b-5564a4e42dab-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.723334 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-config\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.723515 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.726954 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" 
(UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.728409 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-bound-sa-token\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.730742 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.731077 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/33d62c67-3fa8-48ca-b984-820c3061a4ae-metrics-tls\") pod \"dns-default-kmxdh\" (UID: \"33d62c67-3fa8-48ca-b984-820c3061a4ae\") " pod="openshift-dns/dns-default-kmxdh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.732611 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thdfx\" (UniqueName: \"kubernetes.io/projected/a5fc467b-0e4c-4f20-9729-56906756b33d-kube-api-access-thdfx\") pod \"collect-profiles-29319525-kwqwh\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.738511 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.741289 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjlcz\" (UniqueName: \"kubernetes.io/projected/0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909-kube-api-access-pjlcz\") pod \"openshift-config-operator-7777fb866f-ktzhf\" (UID: \"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.745808 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09b3ca22-55ce-4f12-9f35-308b6020819f-etcd-ca\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.748075 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-trusted-ca-bundle\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.756799 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw2tk\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-kube-api-access-rw2tk\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.757110 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/493abf55-6e87-4745-a90b-5564a4e42dab-config\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.770646 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09b3ca22-55ce-4f12-9f35-308b6020819f-serving-cert\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.798413 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:50 crc kubenswrapper[4792]: E0929 18:58:50.799048 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:51.29902697 +0000 UTC m=+143.292334366 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.804715 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" event={"ID":"2b5b8e64-d53b-407e-a10f-d4fed5afd70c","Type":"ContainerStarted","Data":"579d8e2654271df4898dbc7569a349903608ffa15cac4c53acda15a55fa05bfe"} Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.805018 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7467cb63-a3bc-42a9-88ad-f61eb5475110-cert\") pod \"ingress-canary-f6x5z\" (UID: \"7467cb63-a3bc-42a9-88ad-f61eb5475110\") " pod="openshift-ingress-canary/ingress-canary-f6x5z" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.813975 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl9j7\" (UniqueName: \"kubernetes.io/projected/485f1f0b-73b1-4681-9edb-683eda716bde-kube-api-access-rl9j7\") pod \"service-ca-operator-777779d784-ln92w\" (UID: \"485f1f0b-73b1-4681-9edb-683eda716bde\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.822829 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tcpz\" (UniqueName: \"kubernetes.io/projected/4d8c1f74-cc55-4f70-afea-f177b99ec47c-kube-api-access-6tcpz\") pod \"dns-operator-744455d44c-vw7wd\" (UID: \"4d8c1f74-cc55-4f70-afea-f177b99ec47c\") " pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.860632 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-ldssp" event={"ID":"1b36a633-3ac6-4670-aa21-b5e3f750484f","Type":"ContainerStarted","Data":"73d0ffd96c6359cf5c0d79995bb73a03c5ce8652891dcc2db39b272b8397085e"} Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.864298 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mskst\" (UniqueName: \"kubernetes.io/projected/7ac58a67-2de7-48ec-9a6c-f7cf37538bdd-kube-api-access-mskst\") pod \"control-plane-machine-set-operator-78cbb6b69f-857r4\" (UID: \"7ac58a67-2de7-48ec-9a6c-f7cf37538bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.881369 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrxvb\" (UniqueName: \"kubernetes.io/projected/769115af-182e-4e4b-b39e-411bf0d27747-kube-api-access-vrxvb\") pod \"machine-config-server-n4krv\" (UID: \"769115af-182e-4e4b-b39e-411bf0d27747\") " pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.881559 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" event={"ID":"e3ff0f1d-5141-47e0-b414-db59edba635c","Type":"ContainerStarted","Data":"fd7a2abed14a34b614d1fb458450f094de64bd1745bdaf8403e0ab99f13e4188"} Sep 29 18:58:50 crc 
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.908670 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:50 crc kubenswrapper[4792]: E0929 18:58:50.909024 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:51.409010737 +0000 UTC m=+143.402318133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.913482 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z"]
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.915941 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv576\" (UniqueName: \"kubernetes.io/projected/33d62c67-3fa8-48ca-b984-820c3061a4ae-kube-api-access-cv576\") pod \"dns-default-kmxdh\" (UID: \"33d62c67-3fa8-48ca-b984-820c3061a4ae\") " pod="openshift-dns/dns-default-kmxdh"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.923908 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcnfb\" (UniqueName: \"kubernetes.io/projected/7467cb63-a3bc-42a9-88ad-f61eb5475110-kube-api-access-rcnfb\") pod \"ingress-canary-f6x5z\" (UID: \"7467cb63-a3bc-42a9-88ad-f61eb5475110\") " pod="openshift-ingress-canary/ingress-canary-f6x5z"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.927365 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wk22\" (UniqueName: \"kubernetes.io/projected/662a9f05-f1a7-4d9d-8b42-daadfeddb122-kube-api-access-7wk22\") pod \"downloads-7954f5f757-7phmq\" (UID: \"662a9f05-f1a7-4d9d-8b42-daadfeddb122\") " pod="openshift-console/downloads-7954f5f757-7phmq"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.933599 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg"]
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.935356 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmhgp\" (UniqueName: \"kubernetes.io/projected/c465eadf-19c1-417c-bf6e-8f4eb6d26338-kube-api-access-bmhgp\") pod \"console-operator-58897d9998-bpj7h\" (UID: \"c465eadf-19c1-417c-bf6e-8f4eb6d26338\") " pod="openshift-console-operator/console-operator-58897d9998-bpj7h"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.955544 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnknq\" (UniqueName: \"kubernetes.io/projected/493abf55-6e87-4745-a90b-5564a4e42dab-kube-api-access-hnknq\") pod \"authentication-operator-69f744f599-bp8h2\" (UID: \"493abf55-6e87-4745-a90b-5564a4e42dab\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.961507 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" event={"ID":"4b90ed41-b2cd-4525-b5e2-11513ee0c763","Type":"ContainerStarted","Data":"90154c420f6ee53d4a92dce1bdd43603a6ef334931d1ad58f58a77809e05f802"}
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.962979 4792 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-p9pds container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.969351 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" podUID="3a5115bb-23d8-4ff0-9c56-419450cd87fe" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.970080 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.972499 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j66wn\" (UniqueName: \"kubernetes.io/projected/d813bd44-0760-4757-95da-beced796238f-kube-api-access-j66wn\") pod \"csi-hostpathplugin-xkp78\" (UID: \"d813bd44-0760-4757-95da-beced796238f\") " pod="hostpath-provisioner/csi-hostpathplugin-xkp78"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.983002 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9"]
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.987737 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh"
Sep 29 18:58:50 crc kubenswrapper[4792]: I0929 18:58:50.989452 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mttjt\" (UniqueName: \"kubernetes.io/projected/56d0b0e8-6440-4f28-9d05-ad7be713a117-kube-api-access-mttjt\") pod \"oauth-openshift-558db77b4-4pj4c\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") " pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c"
Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.009375 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.009744 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf"
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:58:51 crc kubenswrapper[4792]: E0929 18:58:51.010086 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:51.51005519 +0000 UTC m=+143.503362586 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.011908 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrv4c\" (UniqueName: \"kubernetes.io/projected/440c70b7-786c-454b-9910-11923a2cf456-kube-api-access-nrv4c\") pod \"service-ca-9c57cc56f-t4h92\" (UID: \"440c70b7-786c-454b-9910-11923a2cf456\") " pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.027356 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hdqg\" (UniqueName: \"kubernetes.io/projected/577a8444-c6e3-4aae-922e-12c7cb3b0b11-kube-api-access-4hdqg\") pod \"console-f9d7485db-7pt7w\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.045106 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.045317 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lxcf\" (UniqueName: \"kubernetes.io/projected/09b3ca22-55ce-4f12-9f35-308b6020819f-kube-api-access-5lxcf\") pod \"etcd-operator-b45778765-w569t\" (UID: \"09b3ca22-55ce-4f12-9f35-308b6020819f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.058067 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.069505 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.071333 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.078094 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.095098 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7phmq" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.095504 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.107252 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.107588 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.111482 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:51 crc kubenswrapper[4792]: E0929 18:58:51.114007 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:51.613993961 +0000 UTC m=+143.607301357 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.121655 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.122488 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-f6x5z" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.140002 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-kmxdh" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.147710 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-n4krv" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.172098 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-xkp78" Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.216501 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:51 crc kubenswrapper[4792]: E0929 18:58:51.216952 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:51.716920746 +0000 UTC m=+143.710228142 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.263521 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh"] Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.263948 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj"] Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.292643 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6r69d"] Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.318983 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:51 crc kubenswrapper[4792]: E0929 18:58:51.319507 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:51.819488961 +0000 UTC m=+143.812796357 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.419696 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.419777 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4"] Sep 29 18:58:51 crc kubenswrapper[4792]: E0929 18:58:51.420106 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:51.920090952 +0000 UTC m=+143.913398348 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.464764 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj"] Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.485359 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml"] Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.501828 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq"] Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.521265 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:51 crc kubenswrapper[4792]: E0929 18:58:51.521607 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:52.021583447 +0000 UTC m=+144.014890833 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.523329 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754"] Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.622416 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:51 crc kubenswrapper[4792]: E0929 18:58:51.623264 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:52.123230287 +0000 UTC m=+144.116537683 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.720906 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv"] Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.723921 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:51 crc kubenswrapper[4792]: E0929 18:58:51.724237 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:52.224226439 +0000 UTC m=+144.217533835 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.798293 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-sp272"] Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.831896 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:51 crc kubenswrapper[4792]: E0929 18:58:51.832244 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:52.332230462 +0000 UTC m=+144.325537858 (durationBeforeRetry 500ms). 
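The failure repeating above is kubelet's CSI volume path hitting an empty driver registry: the kubevirt.io.hostpath-provisioner node plugin (the csi-hostpathplugin-xkp78 pod, itself still waiting for a sandbox at this point) has not yet registered with the kubelet, so every MountDevice/TearDown attempt fails while constructing a CSI client and is requeued. A minimal Go sketch of that lookup pattern, assuming an illustrative registry type and socket path; this is not kubelet's actual implementation:

package main

import (
	"fmt"
	"sync"
)

// csiRegistry loosely mimics the kubelet-side bookkeeping behind the error
// above: volume operations resolve the CSI driver by name and fail fast
// until the node plugin has registered itself.
type csiRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> plugin socket endpoint
}

func (r *csiRegistry) register(name, endpoint string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = endpoint
}

func (r *csiRegistry) newClient(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	ep, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return ep, nil
}

func main() {
	reg := &csiRegistry{drivers: map[string]string{}}

	// Before the plugin pod is up, every attempt fails and gets requeued.
	if _, err := reg.newClient("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println("MountDevice:", err)
	}

	// After registration (socket path is illustrative), the lookup succeeds.
	reg.register("kubevirt.io.hostpath-provisioner", "/var/lib/kubelet/plugins/csi-hostpath/csi.sock")
	if ep, err := reg.newClient("kubevirt.io.hostpath-provisioner"); err == nil {
		fmt.Println("MountDevice: dialing", ep)
	}
}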
Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.849332 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" podStartSLOduration=118.849303657 podStartE2EDuration="1m58.849303657s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:51.847257122 +0000 UTC m=+143.840564538" watchObservedRunningTime="2025-09-29 18:58:51.849303657 +0000 UTC m=+143.842611073"
Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.869434 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr"]
Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.919765 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5j6xh" podStartSLOduration=119.919738657 podStartE2EDuration="1m59.919738657s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:51.917425374 +0000 UTC m=+143.910732770" watchObservedRunningTime="2025-09-29 18:58:51.919738657 +0000 UTC m=+143.913046063"
Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.935265 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:51 crc kubenswrapper[4792]: E0929 18:58:51.952913 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:52.45288681 +0000 UTC m=+144.446194206 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:51 crc kubenswrapper[4792]: I0929 18:58:51.961346 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z"]
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.037388 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" event={"ID":"56300584-499b-4d05-ada3-93dade9c9d9e","Type":"ContainerStarted","Data":"64361033baed26774d555212eeccc085d563180eea407f3d8c1a03596d0ce417"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.048080 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:52 crc kubenswrapper[4792]: E0929 18:58:52.048694 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:52.54867547 +0000 UTC m=+144.541982866 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.056270 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" event={"ID":"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41","Type":"ContainerStarted","Data":"07a36a98774f48680f31a09fe1ec98a1ec7510dd66b6cb4e9af30b6a1e34ca52"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.064995 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" event={"ID":"ea34b349-a47a-4632-9fc6-b86e0d606e54","Type":"ContainerStarted","Data":"0d183e0d916af252f54ec7ac6e501585bb0d83ae841fec4f4aa752fe5edd2993"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.100626 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" event={"ID":"4b90ed41-b2cd-4525-b5e2-11513ee0c763","Type":"ContainerStarted","Data":"88ffe41b626c5f870076c0ae91bad0d8ae183d8863f800f94728d6bb6efb26ac"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.154258 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:52 crc kubenswrapper[4792]: E0929 18:58:52.154653 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:52.654628907 +0000 UTC m=+144.647936303 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.184277 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-ldssp" event={"ID":"1b36a633-3ac6-4670-aa21-b5e3f750484f","Type":"ContainerStarted","Data":"8c86643281020efe5d48c7983a81b57fcea1b01af6ba84a8d4f4a24d0caf9457"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.275123 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-n4krv" event={"ID":"769115af-182e-4e4b-b39e-411bf0d27747","Type":"ContainerStarted","Data":"e8721713d092e08ddb8a139cdaf62fbfcf26cfb3a2e0a76cb4e586db020af543"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.276643 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:52 crc kubenswrapper[4792]: E0929 18:58:52.278127 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:52.778105642 +0000 UTC m=+144.771413038 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.331170 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-ldssp"
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.337496 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" event={"ID":"1a5d2b1d-2a67-490d-8f55-45a7a0219457","Type":"ContainerStarted","Data":"33db7c30dd0f6d6cd85d177cd10667535a15380099bde7402807c9fb1a049d14"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.349007 4792 patch_prober.go:28] interesting pod/router-default-5444994796-ldssp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 18:58:52 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Sep 29 18:58:52 crc kubenswrapper[4792]: [+]process-running ok
Sep 29 18:58:52 crc kubenswrapper[4792]: healthz check failed
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.349072 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ldssp" podUID="1b36a633-3ac6-4670-aa21-b5e3f750484f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.353175 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" event={"ID":"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee","Type":"ContainerStarted","Data":"768edec4fc6c451abcf5840aa90f430824b1b2b15010909dec2b89b45f16e802"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.367399 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg" event={"ID":"0d4ea471-e5cc-4571-9d2d-baab1747a457","Type":"ContainerStarted","Data":"3d9b0f0e4e4b94e6a6d068074833c4b26f6218ea836ad5f7000520d45d17ba0a"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.381520 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:52 crc kubenswrapper[4792]: E0929 18:58:52.381890 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:52.881868349 +0000 UTC m=+144.875175735 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
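The router's startup-probe output above is the aggregated healthz format: each named check contributes a [+]/[-] line, and any failure turns the endpoint into an HTTP 500 with a trailing "healthz check failed". A self-contained sketch of that response shape; the check names are taken from the log, but the logic is illustrative, not the router's implementation:

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Ordered checks so the body matches the log's ordering.
	checks := []struct {
		name string
		run  func() error
	}{
		{"backend-http", func() error { return fmt.Errorf("reason withheld") }},
		{"has-synced", func() error { return fmt.Errorf("reason withheld") }},
		{"process-running", func() error { return nil }},
	}
	http.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) {
		status, body := http.StatusOK, ""
		for _, c := range checks {
			if err := c.run(); err != nil {
				status = http.StatusInternalServerError
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
			} else {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			}
		}
		if status != http.StatusOK {
			body += "healthz check failed\n"
		}
		w.WriteHeader(status)
		fmt.Fprint(w, body)
	})
	// A kubelet-style prober GETting this endpoint would report
	// "HTTP probe failed with statuscode: 500" until all checks pass.
	http.ListenAndServe("127.0.0.1:8080", nil)
}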
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.395891 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" event={"ID":"e3ff0f1d-5141-47e0-b414-db59edba635c","Type":"ContainerStarted","Data":"74c6143df495c7cfd0c35254835bcc3cd1d345e6071e72458f84218bebefa0a1"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.442901 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" event={"ID":"aa7d4456-c058-4b02-bdf7-4ea41d52e777","Type":"ContainerStarted","Data":"b41fe2d38be9af538c7dab29f5dfacead3bfbd965fb6771f27796a92a24b4156"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.471374 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" event={"ID":"502b8b77-8c80-4cc0-8590-6fb9ce342289","Type":"ContainerStarted","Data":"fa63383ff0e09162e1730c626421fe38322953b2c3783709d59020c3446e769c"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.478747 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lwr4w"]
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.488360 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:52 crc kubenswrapper[4792]: E0929 18:58:52.488645 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:52.988628298 +0000 UTC m=+144.981935694 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.498295 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4pj4c"]
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.513740 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" event={"ID":"59997681-59ad-46c1-b61e-5206099176d6","Type":"ContainerStarted","Data":"64b7c6560814c0c4e8bc96ef7bb806358cfb40c1983d1163c8b62a9e3b648313"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.539841 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t9hk9" podStartSLOduration=120.539823563 podStartE2EDuration="2m0.539823563s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:52.539792322 +0000 UTC m=+144.533099718" watchObservedRunningTime="2025-09-29 18:58:52.539823563 +0000 UTC m=+144.533130959"
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.544084 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" event={"ID":"837e9a61-a894-44bf-981a-1bfae662e1e8","Type":"ContainerStarted","Data":"0cacf4071e3bba1defb615abba366d9bb1f139ef14dc6a6219c33332341b97be"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.565540 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" event={"ID":"7f2a0e30-8d34-4540-b7b5-99db8dc99d05","Type":"ContainerStarted","Data":"999453018b3ac5e7318428a58579efbb7d4abd22a5e14bcfb94a67fd41cca699"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.589582 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:52 crc kubenswrapper[4792]: E0929 18:58:52.593379 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:53.093358422 +0000 UTC m=+145.086665818 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.603149 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" event={"ID":"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4","Type":"ContainerStarted","Data":"8cffd6786e892d053a863e75c379f0d18868df99997cab13c57a02c421221375"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.627745 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" event={"ID":"5a99395b-1e1f-425f-b934-8ad850a2e8a5","Type":"ContainerStarted","Data":"218d14d62af23b7894d7fb206695077df68abb4ecabbfa885315e105c18749a2"}
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.674720 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds"
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.691248 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:52 crc kubenswrapper[4792]: E0929 18:58:52.692128 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:53.192111513 +0000 UTC m=+145.185418909 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.793225 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:52 crc kubenswrapper[4792]: E0929 18:58:52.793594 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:53.293581938 +0000 UTC m=+145.286889334 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.854776 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-bpj7h"]
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.903088 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:52 crc kubenswrapper[4792]: E0929 18:58:52.903453 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:53.403439231 +0000 UTC m=+145.396746627 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.948510 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" podStartSLOduration=119.948490859 podStartE2EDuration="1m59.948490859s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:52.877932736 +0000 UTC m=+144.871240152" watchObservedRunningTime="2025-09-29 18:58:52.948490859 +0000 UTC m=+144.941798255"
Sep 29 18:58:52 crc kubenswrapper[4792]: I0929 18:58:52.949116 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h" podStartSLOduration=119.949111056 podStartE2EDuration="1m59.949111056s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:52.939691829 +0000 UTC m=+144.932999245" watchObservedRunningTime="2025-09-29 18:58:52.949111056 +0000 UTC m=+144.942418452"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.004213 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:53 crc kubenswrapper[4792]: E0929 18:58:53.004588 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:53.504575327 +0000 UTC m=+145.497882723 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.090664 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4"]
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.115537 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:53 crc kubenswrapper[4792]: E0929 18:58:53.115910 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:53.61589211 +0000 UTC m=+145.609199506 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
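Each nestedpendingoperations entry above follows the same shape: an operation fails, kubelet stamps the earliest time a retry may run ("No retries permitted until ..."), and any attempt before that instant is rejected outright. The lines here all show the initial 500ms step. A rough Go sketch of such a retry gate, assuming a fixed backoff for simplicity (kubelet's real backoff grows on repeated failures):

package main

import (
	"fmt"
	"time"
)

// pendingOp records the earliest permissible retry after a failure.
type pendingOp struct {
	notBefore time.Time
	backoff   time.Duration
}

// try rejects attempts inside the backoff window; a fresh failure re-arms it.
func (op *pendingOp) try(run func() error) error {
	now := time.Now()
	if now.Before(op.notBefore) {
		return fmt.Errorf("no retries permitted until %s (durationBeforeRetry %s)",
			op.notBefore.Format(time.RFC3339Nano), op.backoff)
	}
	if err := run(); err != nil {
		op.notBefore = now.Add(op.backoff)
		return err
	}
	return nil
}

func main() {
	op := &pendingOp{backoff: 500 * time.Millisecond}
	mountDevice := func() error {
		return fmt.Errorf("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
	}

	fmt.Println(op.try(mountDevice)) // fails, arms the 500ms window
	fmt.Println(op.try(mountDevice)) // immediate retry: rejected by the gate
	time.Sleep(600 * time.Millisecond)
	fmt.Println(op.try(mountDevice)) // window elapsed: attempted (and fails) again
}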
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.134913 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-dvq4s"]
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.215045 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" podStartSLOduration=121.215029252 podStartE2EDuration="2m1.215029252s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:53.213085169 +0000 UTC m=+145.206392575" watchObservedRunningTime="2025-09-29 18:58:53.215029252 +0000 UTC m=+145.208336648"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.217910 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:53 crc kubenswrapper[4792]: E0929 18:58:53.218363 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:53.718350502 +0000 UTC m=+145.711657898 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.302294 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-ldssp" podStartSLOduration=120.302271479 podStartE2EDuration="2m0.302271479s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:53.299406131 +0000 UTC m=+145.292713547" watchObservedRunningTime="2025-09-29 18:58:53.302271479 +0000 UTC m=+145.295578875"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.302806 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" podStartSLOduration=120.302796593 podStartE2EDuration="2m0.302796593s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:53.26193945 +0000 UTC m=+145.255246846" watchObservedRunningTime="2025-09-29 18:58:53.302796593 +0000 UTC m=+145.296103989"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.320226 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:53 crc kubenswrapper[4792]: E0929 18:58:53.320952 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:53.820930417 +0000 UTC m=+145.814237813 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.419820 4792 patch_prober.go:28] interesting pod/router-default-5444994796-ldssp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 18:58:53 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Sep 29 18:58:53 crc kubenswrapper[4792]: [+]process-running ok
Sep 29 18:58:53 crc kubenswrapper[4792]: healthz check failed
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.419908 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ldssp" podUID="1b36a633-3ac6-4670-aa21-b5e3f750484f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.422897 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:53 crc kubenswrapper[4792]: E0929 18:58:53.423273 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:53.923257516 +0000 UTC m=+145.916564912 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.488978 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.489400 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.503042 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.503541 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.526039 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:53 crc kubenswrapper[4792]: E0929 18:58:53.526395 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.026379365 +0000 UTC m=+146.019686761 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.547762 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.626974 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:53 crc kubenswrapper[4792]: E0929 18:58:53.627258 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.127246794 +0000 UTC m=+146.120554190 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.646749 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s" event={"ID":"391aee9c-3245-49c5-a150-9d95b16b3c61","Type":"ContainerStarted","Data":"e562f5f1b12dde398b46d36a67155e96c1d2b3ccf8ea562adc97b07c1a24a129"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.660727 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-bpj7h" event={"ID":"c465eadf-19c1-417c-bf6e-8f4eb6d26338","Type":"ContainerStarted","Data":"0b29c0772035394c42f237ab8c2a5e6a1ea124778f82867df4f71ee76f0de866"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.664161 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" event={"ID":"56d0b0e8-6440-4f28-9d05-ad7be713a117","Type":"ContainerStarted","Data":"91928c38b1f99db8d01fc1f53f96029af51d61838019c92ae900dfd857d069ad"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.667443 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" event={"ID":"7ac58a67-2de7-48ec-9a6c-f7cf37538bdd","Type":"ContainerStarted","Data":"d597d6961e59bdd3434d870866974c892bc7c6649a421a9a727ccadaa0505a38"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.679820 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh"]
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.690544 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg" event={"ID":"0d4ea471-e5cc-4571-9d2d-baab1747a457","Type":"ContainerStarted","Data":"ec1a067bbc04db70061d4a9657ebfeb126daa718437b56b2f7eca518a4abd150"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.693911 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d27m9" event={"ID":"a09c3bef-aa53-4bb8-9cf1-b691a3276ed4","Type":"ContainerStarted","Data":"ee77da13c237543ebf18c34d54a1de0ff4b26e5f0d0d78f2ca3030b2544adb7c"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.727814 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:53 crc kubenswrapper[4792]: E0929 18:58:53.729353 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.229330966 +0000 UTC m=+146.222638362 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
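The pod_startup_latency_tracker entries report two durations per pod. With no image pulls observed (firstStartedPulling/lastFinishedPulling at the zero time), podStartSLOduration and podStartE2EDuration coincide and are simply watchObservedRunningTime minus podCreationTimestamp. A worked check against the router entry above (a sketch, not the tracker's code):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Values copied from the router-default entry in the log.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2025-09-29 18:56:53 +0000 UTC")
	observed, _ := time.Parse(layout, "2025-09-29 18:58:53.302271479 +0000 UTC")

	// Prints 2m0.302271479s, matching podStartE2EDuration="2m0.302271479s".
	fmt.Println(observed.Sub(created))
}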
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.736191 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr" event={"ID":"358b1904-d49e-40a5-b5a7-624709da4e55","Type":"ContainerStarted","Data":"ff48ebe8756ae120442c315959b1964450a34daa31fcba91e29b51fc7f636553"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.771651 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" event={"ID":"1a5d2b1d-2a67-490d-8f55-45a7a0219457","Type":"ContainerStarted","Data":"ba8c3adfdbc8c81716215fc03dae7f615c573783cebb7e1245e1c35f81504c92"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.773632 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.779790 4792 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-vt754 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:5443/healthz\": dial tcp 10.217.0.18:5443: connect: connection refused" start-of-body=
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.779835 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" podUID="1a5d2b1d-2a67-490d-8f55-45a7a0219457" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.18:5443/healthz\": dial tcp 10.217.0.18:5443: connect: connection refused"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.810072 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" event={"ID":"e3ff0f1d-5141-47e0-b414-db59edba635c","Type":"ContainerStarted","Data":"23b30cff89c6537cb40c40467c1670e767389b1e866e600141942ea2b8f0778d"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.830896 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:53 crc kubenswrapper[4792]: E0929 18:58:53.832289 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.332278361 +0000 UTC m=+146.325585747 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.834226 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" event={"ID":"59997681-59ad-46c1-b61e-5206099176d6","Type":"ContainerStarted","Data":"ef0f2789efd857954be707e789e6ec40394d6d509b11d4d4d3729e43ee1cad18"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.850971 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" event={"ID":"502b8b77-8c80-4cc0-8590-6fb9ce342289","Type":"ContainerStarted","Data":"d4df5141117ee82288cfaca12256a8e952d2fedf4239c0ae5eaf254327076ae4"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.851913 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.854361 4792 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6r69d container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body=
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.854520 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" podUID="502b8b77-8c80-4cc0-8590-6fb9ce342289" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.871451 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" event={"ID":"39e18b3b-156d-46e5-9ace-51ee36c17614","Type":"ContainerStarted","Data":"a1c152b36d3a3d612e2449ab2a6fb0b37464a2cda3abe32cf55901b2fad6eaea"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.876263 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" event={"ID":"64637bf9-60f4-4394-986b-b2fa4d7fb780","Type":"ContainerStarted","Data":"6c4d892cd07c7edcd4549c70f76c665e78976a82f7acadb80c96d7968a1a748b"}
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.890760 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vmw2h"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.904589 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-xkp78"]
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.906259 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-ln92w"]
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.916868 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t4h92"]
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.918029 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" podStartSLOduration=120.918014667 podStartE2EDuration="2m0.918014667s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:53.894428324 +0000 UTC m=+145.887735720" watchObservedRunningTime="2025-09-29 18:58:53.918014667 +0000 UTC m=+145.911322053"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.920115 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b89qf" podStartSLOduration=120.92010921400001 podStartE2EDuration="2m0.920109214s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:53.918004097 +0000 UTC m=+145.911311513" watchObservedRunningTime="2025-09-29 18:58:53.920109214 +0000 UTC m=+145.913416600"
Sep 29 18:58:53 crc kubenswrapper[4792]: I0929 18:58:53.932682 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:53 crc kubenswrapper[4792]: E0929 18:58:53.934373 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.434350392 +0000 UTC m=+146.427657788 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.017072 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" podStartSLOduration=121.017052186 podStartE2EDuration="2m1.017052186s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:53.982422632 +0000 UTC m=+145.975730048" watchObservedRunningTime="2025-09-29 18:58:54.017052186 +0000 UTC m=+146.010359582"
Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.040987 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.041386 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.541374598 +0000 UTC m=+146.534681994 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:54 crc kubenswrapper[4792]: W0929 18:58:54.043674 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod440c70b7_786c_454b_9910_11923a2cf456.slice/crio-8ed4bd111809cbcb29b5f92f4283c98849ce2527a62c415cc7a0fbb0877bb2f0 WatchSource:0}: Error finding container 8ed4bd111809cbcb29b5f92f4283c98849ce2527a62c415cc7a0fbb0877bb2f0: Status 404 returned error can't find the container with id 8ed4bd111809cbcb29b5f92f4283c98849ce2527a62c415cc7a0fbb0877bb2f0
Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.146371 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.146901 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.646881793 +0000 UTC m=+146.640189189 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
No retries permitted until 2025-09-29 18:58:54.646881793 +0000 UTC m=+146.640189189 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.146954 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.147302 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.647292695 +0000 UTC m=+146.640600091 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.206688 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bp8h2"] Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.246665 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-w569t"] Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.261065 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.261425 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.761409494 +0000 UTC m=+146.754716890 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.316777 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-7pt7w"] Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.339291 4792 patch_prober.go:28] interesting pod/router-default-5444994796-ldssp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 18:58:54 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Sep 29 18:58:54 crc kubenswrapper[4792]: [+]process-running ok Sep 29 18:58:54 crc kubenswrapper[4792]: healthz check failed Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.339341 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ldssp" podUID="1b36a633-3ac6-4670-aa21-b5e3f750484f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.342054 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7phmq"] Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.363791 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.364170 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.864158724 +0000 UTC m=+146.857466120 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.458980 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf"] Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.464494 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.464816 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:54.964800236 +0000 UTC m=+146.958107632 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.464956 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-kmxdh"] Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.508617 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vw7wd"] Sep 29 18:58:54 crc kubenswrapper[4792]: W0929 18:58:54.536068 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33d62c67_3fa8_48ca_b984_820c3061a4ae.slice/crio-93d2efd8f31323737798e6d874d994d5cabe3cf605518a89ea4581e4e537d330 WatchSource:0}: Error finding container 93d2efd8f31323737798e6d874d994d5cabe3cf605518a89ea4581e4e537d330: Status 404 returned error can't find the container with id 93d2efd8f31323737798e6d874d994d5cabe3cf605518a89ea4581e4e537d330 Sep 29 18:58:54 crc kubenswrapper[4792]: W0929 18:58:54.539247 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e5e4bed_dea6_4ddd_80a9_2aac8e7f8909.slice/crio-5aebf37689451e376f156dde57417edde186c83b2d4d5d3fd31d779f68f1a5e2 WatchSource:0}: Error finding container 5aebf37689451e376f156dde57417edde186c83b2d4d5d3fd31d779f68f1a5e2: Status 404 returned error can't find the container with id 5aebf37689451e376f156dde57417edde186c83b2d4d5d3fd31d779f68f1a5e2 Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.565287 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-f6x5z"] Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.565519 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.565895 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:55.065883001 +0000 UTC m=+147.059190397 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:54 crc kubenswrapper[4792]: W0929 18:58:54.619013 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d8c1f74_cc55_4f70_afea_f177b99ec47c.slice/crio-4560446aef28854a73d5d9e54a2690afc050ca2cb1177df19e8f75121bee7288 WatchSource:0}: Error finding container 4560446aef28854a73d5d9e54a2690afc050ca2cb1177df19e8f75121bee7288: Status 404 returned error can't find the container with id 4560446aef28854a73d5d9e54a2690afc050ca2cb1177df19e8f75121bee7288 Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.670374 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.671480 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:55.171447296 +0000 UTC m=+147.164754692 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.772508 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.772886 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:55.272829389 +0000 UTC m=+147.266136795 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.875548 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.875921 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:55.375904188 +0000 UTC m=+147.369211584 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.927644 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7phmq" event={"ID":"662a9f05-f1a7-4d9d-8b42-daadfeddb122","Type":"ContainerStarted","Data":"a1e2526c57f75db6d9ada188713d1e9166f62a42fd2dae4614dc630cfca596e8"} Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.975758 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" event={"ID":"aa7d4456-c058-4b02-bdf7-4ea41d52e777","Type":"ContainerStarted","Data":"b610756f17f4912049ab12f37d51ebb93c3f2e9a98956bc54ec3d3ac74556a37"} Sep 29 18:58:54 crc kubenswrapper[4792]: I0929 18:58:54.977066 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:54 crc kubenswrapper[4792]: E0929 18:58:54.991167 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:55.491149598 +0000 UTC m=+147.484456994 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.031436 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h2hml" podStartSLOduration=122.031419245 podStartE2EDuration="2m2.031419245s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.030037917 +0000 UTC m=+147.023345313" watchObservedRunningTime="2025-09-29 18:58:55.031419245 +0000 UTC m=+147.024726641" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.079650 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:55 crc kubenswrapper[4792]: E0929 18:58:55.082592 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:55.582576039 +0000 UTC m=+147.575883435 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.131485 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" podStartSLOduration=122.131467581 podStartE2EDuration="2m2.131467581s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.130166936 +0000 UTC m=+147.123474352" watchObservedRunningTime="2025-09-29 18:58:55.131467581 +0000 UTC m=+147.124774977" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.182755 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:55 crc kubenswrapper[4792]: E0929 18:58:55.183977 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:55.683964392 +0000 UTC m=+147.677271788 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.196105 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" event={"ID":"440c70b7-786c-454b-9910-11923a2cf456","Type":"ContainerStarted","Data":"118e20fa0d4f6c191c50a7bd2900762b8af67f3f454dcd31b13b7ce0f69f04b7"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.196141 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-t4h92" event={"ID":"440c70b7-786c-454b-9910-11923a2cf456","Type":"ContainerStarted","Data":"8ed4bd111809cbcb29b5f92f4283c98849ce2527a62c415cc7a0fbb0877bb2f0"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.206590 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" event={"ID":"c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee","Type":"ContainerStarted","Data":"685275a70512aecd10049a2dd2fa18a0bea3881a1ca9ceb98e5a8e85834fbb22"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.207836 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.216858 4792 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-w22fq container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.216912 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" podUID="c24d14ff-9dd4-47e0-9a5c-3e03e88b9aee" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.219773 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" event={"ID":"6b7b0964-7f17-4f2f-8a3f-f5e5171fec41","Type":"ContainerStarted","Data":"cd18ba91ff5b1dacc2a8fd470635fe9ef7f560b848a7d61eaf1db9cd8940bbf0"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.246139 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" event={"ID":"5a99395b-1e1f-425f-b934-8ad850a2e8a5","Type":"ContainerStarted","Data":"44d548434c40d3c28ddc6949f9a660143b16c9134b797488d747495cda63806b"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.267026 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s" event={"ID":"391aee9c-3245-49c5-a150-9d95b16b3c61","Type":"ContainerStarted","Data":"3b09e08db8719b94aa84ce5633811e6d81793986e56dab137ea32e98659f4481"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.284482 4792 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:55 crc kubenswrapper[4792]: E0929 18:58:55.304011 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:55.803970792 +0000 UTC m=+147.797278188 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.336444 4792 patch_prober.go:28] interesting pod/router-default-5444994796-ldssp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 18:58:55 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Sep 29 18:58:55 crc kubenswrapper[4792]: [+]process-running ok Sep 29 18:58:55 crc kubenswrapper[4792]: healthz check failed Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.336840 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ldssp" podUID="1b36a633-3ac6-4670-aa21-b5e3f750484f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.348755 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" event={"ID":"485f1f0b-73b1-4681-9edb-683eda716bde","Type":"ContainerStarted","Data":"a9b731af36a9203c26926e3d8dd8dea4bd6e6fcc16c3d6fdae2f4a1af84e81ab"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.349013 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" event={"ID":"485f1f0b-73b1-4681-9edb-683eda716bde","Type":"ContainerStarted","Data":"e549633a8343ed92de88333eff1c36450c0943093baa441810b4db55ab01edb3"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.361426 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" event={"ID":"09b3ca22-55ce-4f12-9f35-308b6020819f","Type":"ContainerStarted","Data":"f566c6ef5ecb352291857079f6d803df10a4bb6d3d83f46d9c54c3e2206dcca6"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.363046 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq" podStartSLOduration=122.363029011 podStartE2EDuration="2m2.363029011s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.279966838 +0000 UTC m=+147.273274244" 
watchObservedRunningTime="2025-09-29 18:58:55.363029011 +0000 UTC m=+147.356336407" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.363727 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" podStartSLOduration=122.36372047 podStartE2EDuration="2m2.36372047s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.350934122 +0000 UTC m=+147.344241518" watchObservedRunningTime="2025-09-29 18:58:55.36372047 +0000 UTC m=+147.357027866" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.384714 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-xkp78" event={"ID":"d813bd44-0760-4757-95da-beced796238f","Type":"ContainerStarted","Data":"ae8a7b9431a90e9fad1d575f7fe3307c518ab0c5c4753b35d3d1d63f44d0dda7"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.385602 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.388469 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bnclj" podStartSLOduration=122.388431213 podStartE2EDuration="2m2.388431213s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.385255827 +0000 UTC m=+147.378563233" watchObservedRunningTime="2025-09-29 18:58:55.388431213 +0000 UTC m=+147.381738609" Sep 29 18:58:55 crc kubenswrapper[4792]: E0929 18:58:55.391257 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:55.89124396 +0000 UTC m=+147.884551356 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.434076 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" event={"ID":"39e18b3b-156d-46e5-9ace-51ee36c17614","Type":"ContainerStarted","Data":"470f784da13649421a2df88a3c64c77dadae900d3e7c7982085d21fe98120416"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.442650 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ln92w" podStartSLOduration=122.44263329 podStartE2EDuration="2m2.44263329s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.416873648 +0000 UTC m=+147.410181044" watchObservedRunningTime="2025-09-29 18:58:55.44263329 +0000 UTC m=+147.435940686" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.480567 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-kmxdh" event={"ID":"33d62c67-3fa8-48ca-b984-820c3061a4ae","Type":"ContainerStarted","Data":"93d2efd8f31323737798e6d874d994d5cabe3cf605518a89ea4581e4e537d330"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.484605 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" podStartSLOduration=122.484591574 podStartE2EDuration="2m2.484591574s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.483466133 +0000 UTC m=+147.476773529" watchObservedRunningTime="2025-09-29 18:58:55.484591574 +0000 UTC m=+147.477898970" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.486339 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.494886 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" event={"ID":"56d0b0e8-6440-4f28-9d05-ad7be713a117","Type":"ContainerStarted","Data":"c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f"} Sep 29 18:58:55 crc kubenswrapper[4792]: E0929 18:58:55.495623 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:55.995597784 +0000 UTC m=+147.988905180 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.496088 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.498949 4792 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-4pj4c container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.41:6443/healthz\": dial tcp 10.217.0.41:6443: connect: connection refused" start-of-body= Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.503137 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" podUID="56d0b0e8-6440-4f28-9d05-ad7be713a117" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.41:6443/healthz\": dial tcp 10.217.0.41:6443: connect: connection refused" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.524263 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-bpj7h" event={"ID":"c465eadf-19c1-417c-bf6e-8f4eb6d26338","Type":"ContainerStarted","Data":"7f2a42768a26212545226554cf6e8f6544392d2aa49e718b03bb723b924d48aa"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.524441 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.526108 4792 patch_prober.go:28] interesting pod/console-operator-58897d9998-bpj7h container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/readyz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.526141 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-bpj7h" podUID="c465eadf-19c1-417c-bf6e-8f4eb6d26338" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/readyz\": dial tcp 10.217.0.36:8443: connect: connection refused" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.526227 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-f6x5z" event={"ID":"7467cb63-a3bc-42a9-88ad-f61eb5475110","Type":"ContainerStarted","Data":"dd08ff41e1a0032909e08520d9081a38e30991c7ecf7949ebf9c88e55f9aba63"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.539309 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" event={"ID":"837e9a61-a894-44bf-981a-1bfae662e1e8","Type":"ContainerStarted","Data":"c58d45da882eaad80edcbd29db0eae19bc731555aba339f67ea577d8f148fd3b"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.553236 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" 
podStartSLOduration=123.553221154 podStartE2EDuration="2m3.553221154s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.539383867 +0000 UTC m=+147.532691263" watchObservedRunningTime="2025-09-29 18:58:55.553221154 +0000 UTC m=+147.546528550" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.574643 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" event={"ID":"ea34b349-a47a-4632-9fc6-b86e0d606e54","Type":"ContainerStarted","Data":"f7e342e10a8bae7012b157da568666af73442280d6188a95b9721576787423dd"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.575400 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.587473 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:55 crc kubenswrapper[4792]: E0929 18:58:55.591615 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.09160102 +0000 UTC m=+148.084908416 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.591646 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg" event={"ID":"0d4ea471-e5cc-4571-9d2d-baab1747a457","Type":"ContainerStarted","Data":"f3cd79aab09fe7546bba74cdaaf6318a4c7f0178d0aadb0b403f02041b943780"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.603741 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" event={"ID":"4d8c1f74-cc55-4f70-afea-f177b99ec47c","Type":"ContainerStarted","Data":"4560446aef28854a73d5d9e54a2690afc050ca2cb1177df19e8f75121bee7288"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.607828 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-n4krv" event={"ID":"769115af-182e-4e4b-b39e-411bf0d27747","Type":"ContainerStarted","Data":"4b2ced523e53dcc83efd4ab18f4481f469e4a302442de04596c75d789d2244f4"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.618718 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" 
event={"ID":"7f2a0e30-8d34-4540-b7b5-99db8dc99d05","Type":"ContainerStarted","Data":"a5b9001f870e2f8e741ac1a23c7afda5577176639d04a61c35cbbee42078ff4e"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.618789 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nv8fj" podStartSLOduration=122.61876515 podStartE2EDuration="2m2.61876515s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.606079954 +0000 UTC m=+147.599387350" watchObservedRunningTime="2025-09-29 18:58:55.61876515 +0000 UTC m=+147.612072546" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.630124 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" event={"ID":"64637bf9-60f4-4394-986b-b2fa4d7fb780","Type":"ContainerStarted","Data":"8b747028c161dfbce5f2bb89d37249c788ec94dc2da719f823bac07193abced2"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.653936 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-bpj7h" podStartSLOduration=123.653915918 podStartE2EDuration="2m3.653915918s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.651913613 +0000 UTC m=+147.645221019" watchObservedRunningTime="2025-09-29 18:58:55.653915918 +0000 UTC m=+147.647223314" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.675662 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr" event={"ID":"358b1904-d49e-40a5-b5a7-624709da4e55","Type":"ContainerStarted","Data":"673813b701402f2ea3bf2bdb080530364aced4c91f6fb4f925d198fc283e7c3f"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.675703 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr" event={"ID":"358b1904-d49e-40a5-b5a7-624709da4e55","Type":"ContainerStarted","Data":"ed820abb40cfffbd1b509dc5f0e554a9fa0979d99248ec6b57857c1d182efb4d"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.703325 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:55 crc kubenswrapper[4792]: E0929 18:58:55.704763 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.204747353 +0000 UTC m=+148.198054749 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.712516 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" event={"ID":"a5fc467b-0e4c-4f20-9729-56906756b33d","Type":"ContainerStarted","Data":"826268197f27b99f4272acfc3e772be0e0fad546ed949d05bf541bdd9b1984bc"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.712754 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" event={"ID":"a5fc467b-0e4c-4f20-9729-56906756b33d","Type":"ContainerStarted","Data":"997f15ae24d17a24c343fcc1c5ec7d3ba9f2188042674a8512763a45e31b042e"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.718518 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" podStartSLOduration=122.718502498 podStartE2EDuration="2m2.718502498s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.712800022 +0000 UTC m=+147.706107428" watchObservedRunningTime="2025-09-29 18:58:55.718502498 +0000 UTC m=+147.711809894" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.729939 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" event={"ID":"7ac58a67-2de7-48ec-9a6c-f7cf37538bdd","Type":"ContainerStarted","Data":"29d7526cc0d42322a6f4e45bd1e32ed6bb48ecb72e1c6f98af69fa7b304627e6"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.736945 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" event={"ID":"59997681-59ad-46c1-b61e-5206099176d6","Type":"ContainerStarted","Data":"bbb5b9264cc57b11d7e192852188e9dc81267f4f53a0ee3fddd5e29e96990a74"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.738548 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7pt7w" event={"ID":"577a8444-c6e3-4aae-922e-12c7cb3b0b11","Type":"ContainerStarted","Data":"6106f5fd9165a2bdf2320371319e6c8d2d61ab4e1e6c4a33baa1a28e032ba099"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.739681 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" event={"ID":"493abf55-6e87-4745-a90b-5564a4e42dab","Type":"ContainerStarted","Data":"e5c6aa32d20aa711da4128a0d68cfdd8497e5a1c9be41e7a9ed26d20a0e6a9ea"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.750875 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" event={"ID":"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909","Type":"ContainerStarted","Data":"5aebf37689451e376f156dde57417edde186c83b2d4d5d3fd31d779f68f1a5e2"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.763651 4792 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" event={"ID":"56300584-499b-4d05-ada3-93dade9c9d9e","Type":"ContainerStarted","Data":"014eaaef66935de9ddc8519dd0a1072bff82b0397602b1e0687588e43de44075"} Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.763699 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.764297 4792 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6r69d container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.764337 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" podUID="502b8b77-8c80-4cc0-8590-6fb9ce342289" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.764379 4792 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-p4lzh container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.764444 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" podUID="56300584-499b-4d05-ada3-93dade9c9d9e" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.801944 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x758z" podStartSLOduration=122.801928691 podStartE2EDuration="2m2.801928691s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.7490486 +0000 UTC m=+147.742355996" watchObservedRunningTime="2025-09-29 18:58:55.801928691 +0000 UTC m=+147.795236087" Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.810842 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:55 crc kubenswrapper[4792]: E0929 18:58:55.818091 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.318077021 +0000 UTC m=+148.311384417 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.868252 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sqpsg" podStartSLOduration=122.868233377 podStartE2EDuration="2m2.868233377s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.80262288 +0000 UTC m=+147.795930276" watchObservedRunningTime="2025-09-29 18:58:55.868233377 +0000 UTC m=+147.861540773"
Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.914087 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:55 crc kubenswrapper[4792]: E0929 18:58:55.914488 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.414457427 +0000 UTC m=+148.407764823 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.918410 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:55 crc kubenswrapper[4792]: E0929 18:58:55.925946 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.425911749 +0000 UTC m=+148.419219145 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:55 crc kubenswrapper[4792]: I0929 18:58:55.940425 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-n4krv" podStartSLOduration=8.940408743999999 podStartE2EDuration="8.940408744s" podCreationTimestamp="2025-09-29 18:58:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.870680664 +0000 UTC m=+147.863988070" watchObservedRunningTime="2025-09-29 18:58:55.940408744 +0000 UTC m=+147.933716140"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:55.999728 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r647z" podStartSLOduration=122.99970863 podStartE2EDuration="2m2.99970863s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:55.938822261 +0000 UTC m=+147.932129667" watchObservedRunningTime="2025-09-29 18:58:55.99970863 +0000 UTC m=+147.993016026"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.019310 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.019685 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.519668654 +0000 UTC m=+148.512976050 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.020767 4792 patch_prober.go:28] interesting pod/apiserver-76f77b778f-4bgtx container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]log ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]etcd ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]poststarthook/start-apiserver-admission-initializer ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]poststarthook/generic-apiserver-start-informers ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]poststarthook/max-in-flight-filter ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]poststarthook/storage-object-count-tracker-hook ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Sep 29 18:58:56 crc kubenswrapper[4792]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]poststarthook/project.openshift.io-projectcache ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]poststarthook/openshift.io-startinformers ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]poststarthook/openshift.io-restmapperupdater ok
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Sep 29 18:58:56 crc kubenswrapper[4792]: livez check failed
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.020795 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" podUID="4b90ed41-b2cd-4525-b5e2-11513ee0c763" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.107083 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" podStartSLOduration=124.107055105 podStartE2EDuration="2m4.107055105s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.06869689 +0000 UTC m=+148.062004296" watchObservedRunningTime="2025-09-29 18:58:56.107055105 +0000 UTC m=+148.100362501"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.107960 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-857r4" podStartSLOduration=123.10795296 podStartE2EDuration="2m3.10795296s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.104722232 +0000 UTC m=+148.098029638" watchObservedRunningTime="2025-09-29 18:58:56.10795296 +0000 UTC m=+148.101260356"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.120428 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.120840 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.6208276 +0000 UTC m=+148.614134996 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.153575 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh" podStartSLOduration=123.153561852 podStartE2EDuration="2m3.153561852s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.152272007 +0000 UTC m=+148.145579413" watchObservedRunningTime="2025-09-29 18:58:56.153561852 +0000 UTC m=+148.146869248"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.221757 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.222455 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.72172624 +0000 UTC m=+148.715033636 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.222584 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.223173 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.723166169 +0000 UTC m=+148.716473565 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.225782 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ljgr4" podStartSLOduration=123.22576381 podStartE2EDuration="2m3.22576381s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.22466772 +0000 UTC m=+148.217975116" watchObservedRunningTime="2025-09-29 18:58:56.22576381 +0000 UTC m=+148.219071196"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.267048 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-7pt7w" podStartSLOduration=123.267030014 podStartE2EDuration="2m3.267030014s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.266359086 +0000 UTC m=+148.259666492" watchObservedRunningTime="2025-09-29 18:58:56.267030014 +0000 UTC m=+148.260337410"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.301742 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" podStartSLOduration=124.30172214 podStartE2EDuration="2m4.30172214s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.300683121 +0000 UTC m=+148.293990527" watchObservedRunningTime="2025-09-29 18:58:56.30172214 +0000 UTC m=+148.295029536"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.323947 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.324344 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.824325835 +0000 UTC m=+148.817633231 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.352696 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wffr" podStartSLOduration=124.352676038 podStartE2EDuration="2m4.352676038s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.352102382 +0000 UTC m=+148.345409778" watchObservedRunningTime="2025-09-29 18:58:56.352676038 +0000 UTC m=+148.345983434"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.354814 4792 patch_prober.go:28] interesting pod/router-default-5444994796-ldssp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 18:58:56 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Sep 29 18:58:56 crc kubenswrapper[4792]: [+]process-running ok
Sep 29 18:58:56 crc kubenswrapper[4792]: healthz check failed
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.354923 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ldssp" podUID="1b36a633-3ac6-4670-aa21-b5e3f750484f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.426880 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.427212 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:56.927201479 +0000 UTC m=+148.920508875 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.528615 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.528863 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.028807167 +0000 UTC m=+149.022114563 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.529509 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.529992 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.029975729 +0000 UTC m=+149.023283125 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.630895 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.631156 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.131125265 +0000 UTC m=+149.124432661 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.631284 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.631732 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.131720922 +0000 UTC m=+149.125028498 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.733105 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.733229 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.233211097 +0000 UTC m=+149.226518493 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.733522 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.733861 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.233840624 +0000 UTC m=+149.227148020 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.764418 4792 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-vt754 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.764497 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" podUID="1a5d2b1d-2a67-490d-8f55-45a7a0219457" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.18:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.770047 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" event={"ID":"ea34b349-a47a-4632-9fc6-b86e0d606e54","Type":"ContainerStarted","Data":"d725f7ef53a767b92a330c928bc7d4322c2d9a3085c7e43d89ea31f1c4c17b62"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.771266 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-bp8h2" event={"ID":"493abf55-6e87-4745-a90b-5564a4e42dab","Type":"ContainerStarted","Data":"6f0c1d8888d4854b5dad2e4a60550ffd13211794574d5374239f89b1358124cd"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.772889 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-xkp78" event={"ID":"d813bd44-0760-4757-95da-beced796238f","Type":"ContainerStarted","Data":"12597bcdd86fbe325622f9815840b4ef71beb86f9d9f888b07051f937ff5c373"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.774969 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lwr4w" event={"ID":"39e18b3b-156d-46e5-9ace-51ee36c17614","Type":"ContainerStarted","Data":"aeb70ee0042c5c0ec56107e324337a8376437236f7e2165c02f7f71741561e5a"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.777209 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-kmxdh" event={"ID":"33d62c67-3fa8-48ca-b984-820c3061a4ae","Type":"ContainerStarted","Data":"07eeff248a58d8717cbad4a1a2687757cf9512bde29390c9d70a160f2b68f519"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.777243 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-kmxdh" event={"ID":"33d62c67-3fa8-48ca-b984-820c3061a4ae","Type":"ContainerStarted","Data":"6b1ae7b8dc74c45801857dcc1ae75c260dcfb4853d114c65d03196b9c469d707"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.777291 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-kmxdh"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.779657 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-f6x5z" event={"ID":"7467cb63-a3bc-42a9-88ad-f61eb5475110","Type":"ContainerStarted","Data":"c16ffb35fa7ee64f419588d1a7f0d4b485660b64a16043c966d0e774dd89803a"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.781397 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7phmq" event={"ID":"662a9f05-f1a7-4d9d-8b42-daadfeddb122","Type":"ContainerStarted","Data":"4d128be38ea8e87d784039c39c139852f297e12961df59fa30251ca798fe546b"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.782147 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7phmq"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.783634 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-7phmq container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body=
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.785232 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7pt7w" event={"ID":"577a8444-c6e3-4aae-922e-12c7cb3b0b11","Type":"ContainerStarted","Data":"1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.786221 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7phmq" podUID="662a9f05-f1a7-4d9d-8b42-daadfeddb122" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.790125 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s" event={"ID":"391aee9c-3245-49c5-a150-9d95b16b3c61","Type":"ContainerStarted","Data":"24cf1b4600a99bc148e667ae26d14e04f2589310fb6bcb5874ed4e9c33db5d45"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.792182 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" event={"ID":"09b3ca22-55ce-4f12-9f35-308b6020819f","Type":"ContainerStarted","Data":"b3490df09448e16fd9b2ea8fde567e11eab056ea1a304561ef324b0e1c0b15f9"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.795278 4792 generic.go:334] "Generic (PLEG): container finished" podID="0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909" containerID="dbcd915a8de3e9cfbc805dc7c643f3a909fffbd2702f8e3e8e14288ac4ade0ad" exitCode=0
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.796096 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" event={"ID":"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909","Type":"ContainerStarted","Data":"aedaa97fbfac1a7ff1bfca73e9e262aad4eeb50cb879477bae407d993df8037a"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.796200 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" event={"ID":"0e5e4bed-dea6-4ddd-80a9-2aac8e7f8909","Type":"ContainerDied","Data":"dbcd915a8de3e9cfbc805dc7c643f3a909fffbd2702f8e3e8e14288ac4ade0ad"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.796291 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.799155 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sp272" event={"ID":"5a99395b-1e1f-425f-b934-8ad850a2e8a5","Type":"ContainerStarted","Data":"e1c14ad9c47bb9a972be512bceedea89e01b03cbe7b17c4454939ceed0a49020"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.804393 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-kmxdh" podStartSLOduration=9.804367266 podStartE2EDuration="9.804367266s" podCreationTimestamp="2025-09-29 18:58:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.801065676 +0000 UTC m=+148.794373082" watchObservedRunningTime="2025-09-29 18:58:56.804367266 +0000 UTC m=+148.797674662"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.809344 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" event={"ID":"4d8c1f74-cc55-4f70-afea-f177b99ec47c","Type":"ContainerStarted","Data":"04ac256e9d1654e6d8455945390e4d566ff39aecb293924aefca8d1dddf6b043"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.809683 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" event={"ID":"4d8c1f74-cc55-4f70-afea-f177b99ec47c","Type":"ContainerStarted","Data":"b200d372358c6589a68240973e4fb4416390ed63eba10cfb0d32405e07426824"}
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.812930 4792 patch_prober.go:28] interesting pod/console-operator-58897d9998-bpj7h container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/readyz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body=
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.812980 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-bpj7h" podUID="c465eadf-19c1-417c-bf6e-8f4eb6d26338" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/readyz\": dial tcp 10.217.0.36:8443: connect: connection refused"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.814539 4792 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6r69d container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body=
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.814572 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" podUID="502b8b77-8c80-4cc0-8590-6fb9ce342289" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.826640 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w22fq"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.834619 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.834952 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.334922849 +0000 UTC m=+149.328230245 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.835442 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.835572 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.843509 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.343489822 +0000 UTC m=+149.336797218 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.856543 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.857227 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-7phmq" podStartSLOduration=123.857217396 podStartE2EDuration="2m3.857217396s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.855384946 +0000 UTC m=+148.848692362" watchObservedRunningTime="2025-09-29 18:58:56.857217396 +0000 UTC m=+148.850524792"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.892443 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-f6x5z" podStartSLOduration=9.892426355 podStartE2EDuration="9.892426355s" podCreationTimestamp="2025-09-29 18:58:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.884201351 +0000 UTC m=+148.877508747" watchObservedRunningTime="2025-09-29 18:58:56.892426355 +0000 UTC m=+148.885733741"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.938041 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p4lzh"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.938449 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.938668 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.938711 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.938742 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:56 crc kubenswrapper[4792]: E0929 18:58:56.939556 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.439540269 +0000 UTC m=+149.432847665 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.941057 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.944499 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.951415 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-w569t" podStartSLOduration=123.951395972 podStartE2EDuration="2m3.951395972s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.94031677 +0000 UTC m=+148.933624186" watchObservedRunningTime="2025-09-29 18:58:56.951395972 +0000 UTC m=+148.944703368"
Sep 29 18:58:56 crc kubenswrapper[4792]: I0929 18:58:56.971678 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.031211 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" podStartSLOduration=125.031191497 podStartE2EDuration="2m5.031191497s" podCreationTimestamp="2025-09-29 18:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:57.024289469 +0000 UTC m=+149.017596875" watchObservedRunningTime="2025-09-29 18:58:57.031191497 +0000 UTC m=+149.024498893"
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.032240 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-dvq4s" podStartSLOduration=124.032233105 podStartE2EDuration="2m4.032233105s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:56.993382616 +0000 UTC m=+148.986690012" watchObservedRunningTime="2025-09-29 18:58:57.032233105 +0000 UTC m=+149.025540491"
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.033029 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.039827 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.040132 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.54012 +0000 UTC m=+149.533427396 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.074603 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.076211 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.141679 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.142019 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.642003226 +0000 UTC m=+149.635310622 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.146297 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-vw7wd" podStartSLOduration=124.146277763 podStartE2EDuration="2m4.146277763s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:58:57.066403976 +0000 UTC m=+149.059711392" watchObservedRunningTime="2025-09-29 18:58:57.146277763 +0000 UTC m=+149.139585159"
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.245102 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.245438 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.745426814 +0000 UTC m=+149.738734210 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.338876 4792 patch_prober.go:28] interesting pod/router-default-5444994796-ldssp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 18:58:57 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Sep 29 18:58:57 crc kubenswrapper[4792]: [+]process-running ok
Sep 29 18:58:57 crc kubenswrapper[4792]: healthz check failed
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.338936 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ldssp" podUID="1b36a633-3ac6-4670-aa21-b5e3f750484f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.347996 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.348219 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.848152623 +0000 UTC m=+149.841460029 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.348327 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.348783 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.84877661 +0000 UTC m=+149.842083996 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.449959 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.450296 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.950270606 +0000 UTC m=+149.943578002 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.450494 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.450787 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:57.95077656 +0000 UTC m=+149.944083946 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.551653 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.551951 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:58.051934326 +0000 UTC m=+150.045241722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.654759 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.655187 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:58.155156669 +0000 UTC m=+150.148464065 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.756255 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.756874 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:58.25685869 +0000 UTC m=+150.250166086 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.813522 4792 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-4pj4c container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.41:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.813574 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" podUID="56d0b0e8-6440-4f28-9d05-ad7be713a117" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.41:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.819194 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-7phmq container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body=
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.819224 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7phmq" podUID="662a9f05-f1a7-4d9d-8b42-daadfeddb122" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused"
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.858540 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.860076 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:58.360064432 +0000 UTC m=+150.353371828 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:57 crc kubenswrapper[4792]: I0929 18:58:57.959361 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:57 crc kubenswrapper[4792]: E0929 18:58:57.959627 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:58.459611955 +0000 UTC m=+150.452919351 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.061301 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd"
Sep 29 18:58:58 crc kubenswrapper[4792]: E0929 18:58:58.061769 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:58.561752618 +0000 UTC m=+150.555060014 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.163062 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 18:58:58 crc kubenswrapper[4792]: E0929 18:58:58.163404 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:58.663386638 +0000 UTC m=+150.656694034 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.198985 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wmjgp"]
Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.199892 4792 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:58 crc kubenswrapper[4792]: W0929 18:58:58.211543 4792 reflector.go:561] object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g": failed to list *v1.Secret: secrets "certified-operators-dockercfg-4rs5g" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Sep 29 18:58:58 crc kubenswrapper[4792]: E0929 18:58:58.211583 4792 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"certified-operators-dockercfg-4rs5g\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"certified-operators-dockercfg-4rs5g\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.235783 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wmjgp"] Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.265500 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.265919 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-catalog-content\") pod \"certified-operators-wmjgp\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") " pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.265967 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-utilities\") pod \"certified-operators-wmjgp\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") " pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.265985 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gmfg\" (UniqueName: \"kubernetes.io/projected/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-kube-api-access-2gmfg\") pod \"certified-operators-wmjgp\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") " pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:58 crc kubenswrapper[4792]: E0929 18:58:58.266295 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:58.7662822 +0000 UTC m=+150.759589596 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.350340 4792 patch_prober.go:28] interesting pod/router-default-5444994796-ldssp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 18:58:58 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Sep 29 18:58:58 crc kubenswrapper[4792]: [+]process-running ok Sep 29 18:58:58 crc kubenswrapper[4792]: healthz check failed Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.350380 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ldssp" podUID="1b36a633-3ac6-4670-aa21-b5e3f750484f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.366844 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.367041 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-catalog-content\") pod \"certified-operators-wmjgp\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") " pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.367097 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-utilities\") pod \"certified-operators-wmjgp\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") " pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.367114 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gmfg\" (UniqueName: \"kubernetes.io/projected/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-kube-api-access-2gmfg\") pod \"certified-operators-wmjgp\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") " pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:58 crc kubenswrapper[4792]: E0929 18:58:58.367466 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:58.867449747 +0000 UTC m=+150.860757133 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.367791 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-catalog-content\") pod \"certified-operators-wmjgp\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") " pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.368010 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-utilities\") pod \"certified-operators-wmjgp\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") " pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.396297 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-llhzn"] Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.397162 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.416215 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gmfg\" (UniqueName: \"kubernetes.io/projected/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-kube-api-access-2gmfg\") pod \"certified-operators-wmjgp\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") " pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.416721 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.419690 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-llhzn"] Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.467924 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-catalog-content\") pod \"community-operators-llhzn\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") " pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.468032 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr2rq\" (UniqueName: \"kubernetes.io/projected/de0a9077-e8b8-4b2c-bfdf-4e965627f520-kube-api-access-cr2rq\") pod \"community-operators-llhzn\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") " pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.468069 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-utilities\") pod \"community-operators-llhzn\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") " 
pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.468115 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:58 crc kubenswrapper[4792]: E0929 18:58:58.468419 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:58.968404908 +0000 UTC m=+150.961712304 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.504921 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.529066 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.547365 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-4bgtx" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.572207 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.572511 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-catalog-content\") pod \"community-operators-llhzn\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") " pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.572637 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr2rq\" (UniqueName: \"kubernetes.io/projected/de0a9077-e8b8-4b2c-bfdf-4e965627f520-kube-api-access-cr2rq\") pod \"community-operators-llhzn\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") " pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.572772 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-utilities\") pod \"community-operators-llhzn\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") " pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.572998 
4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-catalog-content\") pod \"community-operators-llhzn\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") " pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: E0929 18:58:58.573115 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:59.073095421 +0000 UTC m=+151.066402817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.606659 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-utilities\") pod \"community-operators-llhzn\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") " pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.616837 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-f9k8v"] Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.632320 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.665425 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-f9k8v"] Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.676894 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-catalog-content\") pod \"certified-operators-f9k8v\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.677059 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-utilities\") pod \"certified-operators-f9k8v\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.677095 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndfj9\" (UniqueName: \"kubernetes.io/projected/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-kube-api-access-ndfj9\") pod \"certified-operators-f9k8v\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.677412 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:58 crc kubenswrapper[4792]: E0929 18:58:58.678026 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:59.17801376 +0000 UTC m=+151.171321156 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.705158 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr2rq\" (UniqueName: \"kubernetes.io/projected/de0a9077-e8b8-4b2c-bfdf-4e965627f520-kube-api-access-cr2rq\") pod \"community-operators-llhzn\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") " pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.764637 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.785346 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.785495 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-catalog-content\") pod \"certified-operators-f9k8v\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.785532 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-utilities\") pod \"certified-operators-f9k8v\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.785563 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndfj9\" (UniqueName: \"kubernetes.io/projected/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-kube-api-access-ndfj9\") pod \"certified-operators-f9k8v\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:58 crc kubenswrapper[4792]: E0929 18:58:58.785935 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:59.28591934 +0000 UTC m=+151.279226726 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.786260 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-catalog-content\") pod \"certified-operators-f9k8v\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.786467 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-utilities\") pod \"certified-operators-f9k8v\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.819273 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-grgxc"] Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.820217 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.837071 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndfj9\" (UniqueName: \"kubernetes.io/projected/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-kube-api-access-ndfj9\") pod \"certified-operators-f9k8v\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.859281 4792 generic.go:334] "Generic (PLEG): container finished" podID="a5fc467b-0e4c-4f20-9729-56906756b33d" containerID="826268197f27b99f4272acfc3e772be0e0fad546ed949d05bf541bdd9b1984bc" exitCode=0 Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.859357 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" event={"ID":"a5fc467b-0e4c-4f20-9729-56906756b33d","Type":"ContainerDied","Data":"826268197f27b99f4272acfc3e772be0e0fad546ed949d05bf541bdd9b1984bc"} Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.875429 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"557f0bc07e45695da7055d5075a5b1d83d9f2ad4b02854c2434cda133cf4e1a9"} Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.893616 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-utilities\") pod \"community-operators-grgxc\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.893661 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-catalog-content\") pod \"community-operators-grgxc\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.893694 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjgzc\" (UniqueName: \"kubernetes.io/projected/51c4e50f-73c5-485d-a704-b0454f9cfde5-kube-api-access-qjgzc\") pod \"community-operators-grgxc\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.893720 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:58 crc kubenswrapper[4792]: E0929 18:58:58.894045 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:59.394019905 +0000 UTC m=+151.387327301 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.903520 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-xkp78" event={"ID":"d813bd44-0760-4757-95da-beced796238f","Type":"ContainerStarted","Data":"f9ad3062845235dfa2f6a7193a5e77792ca52d58f894362e95c16e28ec9d3d0e"} Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.909192 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-7phmq container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.909255 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7phmq" podUID="662a9f05-f1a7-4d9d-8b42-daadfeddb122" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" Sep 29 18:58:58 crc kubenswrapper[4792]: I0929 18:58:58.922120 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grgxc"] Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:58.994880 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 
18:58:58.995102 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjgzc\" (UniqueName: \"kubernetes.io/projected/51c4e50f-73c5-485d-a704-b0454f9cfde5-kube-api-access-qjgzc\") pod \"community-operators-grgxc\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:58.995375 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-utilities\") pod \"community-operators-grgxc\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:58.995443 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-catalog-content\") pod \"community-operators-grgxc\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:59 crc kubenswrapper[4792]: E0929 18:58:58.999207 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:59.499184311 +0000 UTC m=+151.492491707 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.004366 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-catalog-content\") pod \"community-operators-grgxc\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.004415 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-utilities\") pod \"community-operators-grgxc\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.058914 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjgzc\" (UniqueName: \"kubernetes.io/projected/51c4e50f-73c5-485d-a704-b0454f9cfde5-kube-api-access-qjgzc\") pod \"community-operators-grgxc\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.100487 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:59 crc kubenswrapper[4792]: E0929 18:58:59.100800 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:59.60078843 +0000 UTC m=+151.594095826 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.139450 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.201981 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:59 crc kubenswrapper[4792]: E0929 18:58:59.202367 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:59.702349827 +0000 UTC m=+151.695657223 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.303252 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:59 crc kubenswrapper[4792]: E0929 18:58:59.303609 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:58:59.803596926 +0000 UTC m=+151.796904322 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.341301 4792 patch_prober.go:28] interesting pod/router-default-5444994796-ldssp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 18:58:59 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Sep 29 18:58:59 crc kubenswrapper[4792]: [+]process-running ok Sep 29 18:58:59 crc kubenswrapper[4792]: healthz check failed Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.341355 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ldssp" podUID="1b36a633-3ac6-4670-aa21-b5e3f750484f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.389325 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.391865 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.396889 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.410397 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:59 crc kubenswrapper[4792]: E0929 18:58:59.410683 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:58:59.910666963 +0000 UTC m=+151.903974359 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.511675 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:59 crc kubenswrapper[4792]: E0929 18:58:59.512122 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:00.012110968 +0000 UTC m=+152.005418364 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.612751 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:59 crc kubenswrapper[4792]: E0929 18:58:59.613529 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:00.11348894 +0000 UTC m=+152.106796326 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.662359 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.663217 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.670291 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.683538 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.686692 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.716295 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5cc522d7-c98b-4faf-8962-0a0e7274e18f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.716764 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.716881 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5cc522d7-c98b-4faf-8962-0a0e7274e18f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 18:58:59 crc kubenswrapper[4792]: E0929 18:58:59.717359 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:00.21734191 +0000 UTC m=+152.210649306 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.826501 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.826659 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5cc522d7-c98b-4faf-8962-0a0e7274e18f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.826719 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5cc522d7-c98b-4faf-8962-0a0e7274e18f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 18:58:59 crc kubenswrapper[4792]: E0929 18:58:59.827118 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:00.327102471 +0000 UTC m=+152.320409867 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.827147 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5cc522d7-c98b-4faf-8962-0a0e7274e18f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.928455 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:58:59 crc kubenswrapper[4792]: E0929 18:58:59.929227 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:00.429214403 +0000 UTC m=+152.422521799 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.951199 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7c74c7564de3fb5508d5df0e23b3255913cd7548a7b5e4d31938af6724ff5471"} Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.952922 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.956332 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5cc522d7-c98b-4faf-8962-0a0e7274e18f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.974126 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"33f14b70cfd0e26102fde0f8fb4c4cc768cec3af82920b6e3896900847eadeeb"} Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.974175 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"9ec114991a6c54ecf348bcd8edcd57d92dfaadd04b918c38fe44544dcc4fb16a"} Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.978403 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-llhzn"] Sep 29 18:58:59 crc kubenswrapper[4792]: I0929 18:58:59.984708 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.018556 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-xkp78" event={"ID":"d813bd44-0760-4757-95da-beced796238f","Type":"ContainerStarted","Data":"ebae6aac11eb7d333a2c7fa8374c92a49ad9879c7a650e51d4b2ed3dadee3ac7"} Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.038205 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:00 crc kubenswrapper[4792]: E0929 18:59:00.039313 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:00.539285132 +0000 UTC m=+152.532592518 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.077186 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"4a730a870d75432d9a8b4afa605278fe259c01d0f5c15d1c7310be8cf0a5536b"} Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.077273 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8731af54653f2f6f20190fd17e9888e94b00395f945db0b527129b9f076d32db"} Sep 29 18:59:00 crc kubenswrapper[4792]: W0929 18:59:00.096002 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde0a9077_e8b8_4b2c_bfdf_4e965627f520.slice/crio-77b201a3a10d15a7d22a23e810153a5520eacd854960f6019cee8a231070e6fd WatchSource:0}: Error finding container 77b201a3a10d15a7d22a23e810153a5520eacd854960f6019cee8a231070e6fd: Status 404 returned error can't find the container with id 77b201a3a10d15a7d22a23e810153a5520eacd854960f6019cee8a231070e6fd Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.161561 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:00 crc kubenswrapper[4792]: E0929 18:59:00.168096 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:00.668080472 +0000 UTC m=+152.661387868 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.251176 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rwhnq"] Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.252113 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.266749 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:00 crc kubenswrapper[4792]: E0929 18:59:00.267022 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:00.767008688 +0000 UTC m=+152.760316084 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.272118 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.302265 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.322550 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rwhnq"] Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.333724 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.342138 4792 patch_prober.go:28] interesting pod/router-default-5444994796-ldssp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 18:59:00 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Sep 29 18:59:00 crc kubenswrapper[4792]: [+]process-running ok Sep 29 18:59:00 crc kubenswrapper[4792]: healthz check failed Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.342602 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ldssp" podUID="1b36a633-3ac6-4670-aa21-b5e3f750484f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.368947 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-utilities\") pod \"redhat-marketplace-rwhnq\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") " pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.369024 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.369050 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-catalog-content\") pod \"redhat-marketplace-rwhnq\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") " pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.369128 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2z2m\" (UniqueName: 
\"kubernetes.io/projected/f9df84a0-6eb0-415a-ae29-c93ba496f855-kube-api-access-c2z2m\") pod \"redhat-marketplace-rwhnq\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") " pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: E0929 18:59:00.371419 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:00.871405152 +0000 UTC m=+152.864712548 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.430121 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vt754" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.469211 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ktzhf" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.470163 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.470411 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-utilities\") pod \"redhat-marketplace-rwhnq\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") " pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.470443 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-catalog-content\") pod \"redhat-marketplace-rwhnq\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") " pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.470488 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2z2m\" (UniqueName: \"kubernetes.io/projected/f9df84a0-6eb0-415a-ae29-c93ba496f855-kube-api-access-c2z2m\") pod \"redhat-marketplace-rwhnq\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") " pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: E0929 18:59:00.471410 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:00.971395127 +0000 UTC m=+152.964702523 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.471921 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-utilities\") pod \"redhat-marketplace-rwhnq\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") " pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.472328 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-catalog-content\") pod \"redhat-marketplace-rwhnq\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") " pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.570057 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2z2m\" (UniqueName: \"kubernetes.io/projected/f9df84a0-6eb0-415a-ae29-c93ba496f855-kube-api-access-c2z2m\") pod \"redhat-marketplace-rwhnq\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") " pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.575674 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:00 crc kubenswrapper[4792]: E0929 18:59:00.577279 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:01.077248121 +0000 UTC m=+153.070555507 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.605691 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.667082 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7kmj2"] Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.669153 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.679072 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.679490 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-catalog-content\") pod \"redhat-marketplace-7kmj2\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.679527 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-utilities\") pod \"redhat-marketplace-7kmj2\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.679556 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvplj\" (UniqueName: \"kubernetes.io/projected/b2e3cd89-1359-4a18-ade6-05cba0c68e70-kube-api-access-cvplj\") pod \"redhat-marketplace-7kmj2\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:00 crc kubenswrapper[4792]: E0929 18:59:00.679704 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:01.179682582 +0000 UTC m=+153.172989968 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.749738 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7kmj2"] Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.764034 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grgxc"] Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.784019 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.784161 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-catalog-content\") pod \"redhat-marketplace-7kmj2\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.784206 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-utilities\") pod \"redhat-marketplace-7kmj2\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.784236 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvplj\" (UniqueName: \"kubernetes.io/projected/b2e3cd89-1359-4a18-ade6-05cba0c68e70-kube-api-access-cvplj\") pod \"redhat-marketplace-7kmj2\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:00 crc kubenswrapper[4792]: E0929 18:59:00.796220 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:01.296204208 +0000 UTC m=+153.289511604 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.796605 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-catalog-content\") pod \"redhat-marketplace-7kmj2\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.796816 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-utilities\") pod \"redhat-marketplace-7kmj2\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.862623 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvplj\" (UniqueName: \"kubernetes.io/projected/b2e3cd89-1359-4a18-ade6-05cba0c68e70-kube-api-access-cvplj\") pod \"redhat-marketplace-7kmj2\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:00 crc kubenswrapper[4792]: I0929 18:59:00.900795 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:00 crc kubenswrapper[4792]: E0929 18:59:00.901359 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:01.401330542 +0000 UTC m=+153.394637938 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.002060 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-f9k8v"] Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.002715 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.003045 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:01.503029353 +0000 UTC m=+153.496336749 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.011550 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.097996 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-7phmq container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.098049 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7phmq" podUID="662a9f05-f1a7-4d9d-8b42-daadfeddb122" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.098420 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-7phmq container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.098441 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7phmq" podUID="662a9f05-f1a7-4d9d-8b42-daadfeddb122" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.100498 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-bpj7h" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.105676 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.106045 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:01.60602605 +0000 UTC m=+153.599333446 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.116423 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.116457 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.129328 4792 patch_prober.go:28] interesting pod/console-f9d7485db-7pt7w container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.129389 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-7pt7w" podUID="577a8444-c6e3-4aae-922e-12c7cb3b0b11" containerName="console" probeResult="failure" output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.156995 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llhzn" event={"ID":"de0a9077-e8b8-4b2c-bfdf-4e965627f520","Type":"ContainerStarted","Data":"fc358f8ca2540c2cbe0752bb7b0b8288c8770fbd804f9eef43fea6ea2d103a6f"} Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.157063 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llhzn" event={"ID":"de0a9077-e8b8-4b2c-bfdf-4e965627f520","Type":"ContainerStarted","Data":"77b201a3a10d15a7d22a23e810153a5520eacd854960f6019cee8a231070e6fd"} Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.173683 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grgxc" event={"ID":"51c4e50f-73c5-485d-a704-b0454f9cfde5","Type":"ContainerStarted","Data":"5a942834ccbc865ddd4d0876c48783a5459ba21b2c7056a68dd231c0cf62c6ca"} Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.209991 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.212944 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:01.712922443 +0000 UTC m=+153.706229909 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.237966 4792 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.248713 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wmjgp"] Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.312198 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.312953 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:01.812924077 +0000 UTC m=+153.806231473 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.417242 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.421171 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.421552 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:01.921526877 +0000 UTC m=+153.914834273 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.437397 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-ldssp" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.473638 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.526434 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a5fc467b-0e4c-4f20-9729-56906756b33d-config-volume\") pod \"a5fc467b-0e4c-4f20-9729-56906756b33d\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.526568 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thdfx\" (UniqueName: \"kubernetes.io/projected/a5fc467b-0e4c-4f20-9729-56906756b33d-kube-api-access-thdfx\") pod \"a5fc467b-0e4c-4f20-9729-56906756b33d\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.526735 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.526780 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a5fc467b-0e4c-4f20-9729-56906756b33d-secret-volume\") pod \"a5fc467b-0e4c-4f20-9729-56906756b33d\" (UID: \"a5fc467b-0e4c-4f20-9729-56906756b33d\") " Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.533365 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5fc467b-0e4c-4f20-9729-56906756b33d-config-volume" (OuterVolumeSpecName: "config-volume") pod "a5fc467b-0e4c-4f20-9729-56906756b33d" (UID: "a5fc467b-0e4c-4f20-9729-56906756b33d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.533475 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:02.033455027 +0000 UTC m=+154.026762423 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.537977 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5fc467b-0e4c-4f20-9729-56906756b33d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a5fc467b-0e4c-4f20-9729-56906756b33d" (UID: "a5fc467b-0e4c-4f20-9729-56906756b33d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.545940 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5fc467b-0e4c-4f20-9729-56906756b33d-kube-api-access-thdfx" (OuterVolumeSpecName: "kube-api-access-thdfx") pod "a5fc467b-0e4c-4f20-9729-56906756b33d" (UID: "a5fc467b-0e4c-4f20-9729-56906756b33d"). InnerVolumeSpecName "kube-api-access-thdfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.629477 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.630288 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:02.130273015 +0000 UTC m=+154.123580411 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.630480 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a5fc467b-0e4c-4f20-9729-56906756b33d-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.630495 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a5fc467b-0e4c-4f20-9729-56906756b33d-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.630507 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thdfx\" (UniqueName: \"kubernetes.io/projected/a5fc467b-0e4c-4f20-9729-56906756b33d-kube-api-access-thdfx\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.699836 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7kmj2"] Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.732268 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.732705 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:02.232689056 +0000 UTC m=+154.225996452 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.784034 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zr4zx"] Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.784220 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5fc467b-0e4c-4f20-9729-56906756b33d" containerName="collect-profiles" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.784232 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5fc467b-0e4c-4f20-9729-56906756b33d" containerName="collect-profiles" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.784350 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5fc467b-0e4c-4f20-9729-56906756b33d" containerName="collect-profiles" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.785019 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.788358 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.814478 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zr4zx"] Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.835923 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-utilities\") pod \"redhat-operators-zr4zx\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") " pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.835986 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l88f5\" (UniqueName: \"kubernetes.io/projected/71994559-92a6-4331-bbbb-04e9d2498c9b-kube-api-access-l88f5\") pod \"redhat-operators-zr4zx\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") " pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.836024 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.836071 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-catalog-content\") pod \"redhat-operators-zr4zx\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") " pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.836522 4792 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:02.336505503 +0000 UTC m=+154.329812889 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.867775 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.939254 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:02.439232653 +0000 UTC m=+154.432540049 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.939661 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.939872 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.939921 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-catalog-content\") pod \"redhat-operators-zr4zx\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") " pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.939967 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-utilities\") pod \"redhat-operators-zr4zx\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") " pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.940004 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l88f5\" (UniqueName: 
\"kubernetes.io/projected/71994559-92a6-4331-bbbb-04e9d2498c9b-kube-api-access-l88f5\") pod \"redhat-operators-zr4zx\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") " pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:01 crc kubenswrapper[4792]: E0929 18:59:01.940456 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:02.440448426 +0000 UTC m=+154.433755822 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.940956 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-catalog-content\") pod \"redhat-operators-zr4zx\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") " pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.941182 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-utilities\") pod \"redhat-operators-zr4zx\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") " pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.963407 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l88f5\" (UniqueName: \"kubernetes.io/projected/71994559-92a6-4331-bbbb-04e9d2498c9b-kube-api-access-l88f5\") pod \"redhat-operators-zr4zx\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") " pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:01 crc kubenswrapper[4792]: I0929 18:59:01.976636 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rwhnq"] Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.050538 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:02 crc kubenswrapper[4792]: E0929 18:59:02.060415 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 18:59:02.560364003 +0000 UTC m=+154.553671399 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.106190 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.153118 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:02 crc kubenswrapper[4792]: E0929 18:59:02.153427 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 18:59:02.653413109 +0000 UTC m=+154.646720505 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fnpzd" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.196980 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-558ts"] Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.197959 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.203120 4792 generic.go:334] "Generic (PLEG): container finished" podID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerID="1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482" exitCode=0 Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.203772 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grgxc" event={"ID":"51c4e50f-73c5-485d-a704-b0454f9cfde5","Type":"ContainerDied","Data":"1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.203821 4792 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-29T18:59:01.237988106Z","Handler":null,"Name":""} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.207539 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.211507 4792 generic.go:334] "Generic (PLEG): container finished" podID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" containerID="89d1da23908d418782c64657fd64290c6db59f42bfba17548aed22f312f7f089" exitCode=0 Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.211573 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmjgp" event={"ID":"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e","Type":"ContainerDied","Data":"89d1da23908d418782c64657fd64290c6db59f42bfba17548aed22f312f7f089"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.211600 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmjgp" event={"ID":"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e","Type":"ContainerStarted","Data":"8c6c1d5a7b4302c79d9b9dce64f1b945c232546cedc82a9510c0cd312593d0b8"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.217022 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5cc522d7-c98b-4faf-8962-0a0e7274e18f","Type":"ContainerStarted","Data":"8650fe312467a799e2f5844c018dd850c0336b11ad1534db1f467e4ec6ffdf51"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.229997 4792 generic.go:334] "Generic (PLEG): container finished" podID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerID="cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5" exitCode=0 Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.230085 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7kmj2" event={"ID":"b2e3cd89-1359-4a18-ade6-05cba0c68e70","Type":"ContainerDied","Data":"cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.230117 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7kmj2" event={"ID":"b2e3cd89-1359-4a18-ade6-05cba0c68e70","Type":"ContainerStarted","Data":"26c87749a50f21fa161637662e76dd927114119bfb872e64fb3c4543ca9e51a4"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.233405 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-558ts"] Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.239348 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="hostpath-provisioner/csi-hostpathplugin-xkp78" event={"ID":"d813bd44-0760-4757-95da-beced796238f","Type":"ContainerStarted","Data":"aff76d39a94f5ec334209c8eec344b3d6d89e95db89e8c8723eaa81b5bfc6001"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.244066 4792 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.244105 4792 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.254893 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.255197 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-catalog-content\") pod \"redhat-operators-558ts\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.255254 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-utilities\") pod \"redhat-operators-558ts\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.255287 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wtt7\" (UniqueName: \"kubernetes.io/projected/103cb699-909d-400e-97cc-872f7769a806-kube-api-access-6wtt7\") pod \"redhat-operators-558ts\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.274524 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" event={"ID":"a5fc467b-0e4c-4f20-9729-56906756b33d","Type":"ContainerDied","Data":"997f15ae24d17a24c343fcc1c5ec7d3ba9f2188042674a8512763a45e31b042e"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.274565 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="997f15ae24d17a24c343fcc1c5ec7d3ba9f2188042674a8512763a45e31b042e" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.274669 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.281067 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rwhnq" event={"ID":"f9df84a0-6eb0-415a-ae29-c93ba496f855","Type":"ContainerStarted","Data":"8b9aa392a9aa5a8f7c3733593131b28f53d6ad590a2a1e16bc8bbfff0b128372"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.281988 4792 generic.go:334] "Generic (PLEG): container finished" podID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" containerID="6e2ae06982081f4e1198b2b5861bf37f09b3bd8e3825f52c0ced7ab07954b8c4" exitCode=0 Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.282033 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9k8v" event={"ID":"82bcbfe6-9f95-4749-80a0-81b82a1b78d9","Type":"ContainerDied","Data":"6e2ae06982081f4e1198b2b5861bf37f09b3bd8e3825f52c0ced7ab07954b8c4"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.282048 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9k8v" event={"ID":"82bcbfe6-9f95-4749-80a0-81b82a1b78d9","Type":"ContainerStarted","Data":"1ffb287befc01f4843fecb7c9e3f99e0de835f233c21713628af9cc70dfa60c5"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.292337 4792 generic.go:334] "Generic (PLEG): container finished" podID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerID="fc358f8ca2540c2cbe0752bb7b0b8288c8770fbd804f9eef43fea6ea2d103a6f" exitCode=0 Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.293293 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llhzn" event={"ID":"de0a9077-e8b8-4b2c-bfdf-4e965627f520","Type":"ContainerDied","Data":"fc358f8ca2540c2cbe0752bb7b0b8288c8770fbd804f9eef43fea6ea2d103a6f"} Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.313237 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.358678 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.359357 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-catalog-content\") pod \"redhat-operators-558ts\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.359451 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-utilities\") pod \"redhat-operators-558ts\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.359574 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wtt7\" (UniqueName: \"kubernetes.io/projected/103cb699-909d-400e-97cc-872f7769a806-kube-api-access-6wtt7\") pod \"redhat-operators-558ts\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.360129 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-utilities\") pod \"redhat-operators-558ts\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.360967 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-catalog-content\") pod \"redhat-operators-558ts\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.372928 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.372981 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.384198 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-xkp78" podStartSLOduration=14.384177737 podStartE2EDuration="14.384177737s" podCreationTimestamp="2025-09-29 18:58:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:59:02.378482922 +0000 UTC m=+154.371790338" watchObservedRunningTime="2025-09-29 18:59:02.384177737 +0000 UTC m=+154.377485123" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.388611 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wtt7\" (UniqueName: \"kubernetes.io/projected/103cb699-909d-400e-97cc-872f7769a806-kube-api-access-6wtt7\") pod \"redhat-operators-558ts\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.523363 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.676688 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fnpzd\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.703822 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zr4zx"] Sep 29 18:59:02 crc kubenswrapper[4792]: W0929 18:59:02.728120 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71994559_92a6_4331_bbbb_04e9d2498c9b.slice/crio-47b480c16e708df0b6b4bef02dca9298fa0d2922c7632ca9d195491d7f6828cc WatchSource:0}: Error finding container 47b480c16e708df0b6b4bef02dca9298fa0d2922c7632ca9d195491d7f6828cc: Status 404 returned error can't find the container with id 47b480c16e708df0b6b4bef02dca9298fa0d2922c7632ca9d195491d7f6828cc Sep 29 18:59:02 crc kubenswrapper[4792]: I0929 18:59:02.994597 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:03 crc kubenswrapper[4792]: I0929 18:59:03.097370 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 29 18:59:03 crc kubenswrapper[4792]: I0929 18:59:03.250103 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-558ts"] Sep 29 18:59:03 crc kubenswrapper[4792]: I0929 18:59:03.320079 4792 generic.go:334] "Generic (PLEG): container finished" podID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerID="a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f" exitCode=0 Sep 29 18:59:03 crc kubenswrapper[4792]: I0929 18:59:03.321080 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rwhnq" event={"ID":"f9df84a0-6eb0-415a-ae29-c93ba496f855","Type":"ContainerDied","Data":"a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f"} Sep 29 18:59:03 crc kubenswrapper[4792]: I0929 18:59:03.420622 4792 generic.go:334] "Generic (PLEG): container finished" podID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerID="12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0" exitCode=0 Sep 29 18:59:03 crc kubenswrapper[4792]: I0929 18:59:03.420901 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr4zx" event={"ID":"71994559-92a6-4331-bbbb-04e9d2498c9b","Type":"ContainerDied","Data":"12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0"} Sep 29 18:59:03 crc kubenswrapper[4792]: I0929 18:59:03.420926 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr4zx" event={"ID":"71994559-92a6-4331-bbbb-04e9d2498c9b","Type":"ContainerStarted","Data":"47b480c16e708df0b6b4bef02dca9298fa0d2922c7632ca9d195491d7f6828cc"} Sep 29 18:59:03 crc kubenswrapper[4792]: I0929 18:59:03.464557 4792 generic.go:334] "Generic (PLEG): container finished" podID="5cc522d7-c98b-4faf-8962-0a0e7274e18f" containerID="b7a4466a62c47f8f2121d86e68b235351d8928bd490a377196ef3be50584f73d" exitCode=0 Sep 29 18:59:03 crc kubenswrapper[4792]: I0929 18:59:03.466129 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5cc522d7-c98b-4faf-8962-0a0e7274e18f","Type":"ContainerDied","Data":"b7a4466a62c47f8f2121d86e68b235351d8928bd490a377196ef3be50584f73d"} Sep 29 18:59:03 crc kubenswrapper[4792]: I0929 18:59:03.486217 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fnpzd"] Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.385250 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.386486 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.389427 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.389978 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.397300 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.519609 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/850d3d01-4107-4025-8546-c95be2ce7578-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"850d3d01-4107-4025-8546-c95be2ce7578\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.519978 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/850d3d01-4107-4025-8546-c95be2ce7578-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"850d3d01-4107-4025-8546-c95be2ce7578\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.520404 4792 generic.go:334] "Generic (PLEG): container finished" podID="103cb699-909d-400e-97cc-872f7769a806" containerID="d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618" exitCode=0 Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.520522 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-558ts" event={"ID":"103cb699-909d-400e-97cc-872f7769a806","Type":"ContainerDied","Data":"d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618"} Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.520559 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-558ts" event={"ID":"103cb699-909d-400e-97cc-872f7769a806","Type":"ContainerStarted","Data":"75f1d17a988445f12301e96135d1417bda99b4fcdfeaf5fc0b1a9089dc61a7ea"} Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.559899 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" event={"ID":"b3ccc1f5-4945-4a14-8f84-363683bbd575","Type":"ContainerStarted","Data":"e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0"} Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.560068 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" event={"ID":"b3ccc1f5-4945-4a14-8f84-363683bbd575","Type":"ContainerStarted","Data":"0d4bbd00d0c88324d507a00824f6176a15191e53cc41f6a5259cf4fb5796afaf"} Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.560093 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.605359 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" podStartSLOduration=131.605342891 podStartE2EDuration="2m11.605342891s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:59:04.604886508 +0000 UTC m=+156.598193904" watchObservedRunningTime="2025-09-29 18:59:04.605342891 +0000 UTC m=+156.598650287" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.621700 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/850d3d01-4107-4025-8546-c95be2ce7578-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"850d3d01-4107-4025-8546-c95be2ce7578\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.621746 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/850d3d01-4107-4025-8546-c95be2ce7578-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"850d3d01-4107-4025-8546-c95be2ce7578\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.623318 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/850d3d01-4107-4025-8546-c95be2ce7578-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"850d3d01-4107-4025-8546-c95be2ce7578\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.656077 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/850d3d01-4107-4025-8546-c95be2ce7578-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"850d3d01-4107-4025-8546-c95be2ce7578\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 18:59:04 crc kubenswrapper[4792]: I0929 18:59:04.727235 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.505691 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.536368 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kube-api-access\") pod \"5cc522d7-c98b-4faf-8962-0a0e7274e18f\" (UID: \"5cc522d7-c98b-4faf-8962-0a0e7274e18f\") " Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.536462 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kubelet-dir\") pod \"5cc522d7-c98b-4faf-8962-0a0e7274e18f\" (UID: \"5cc522d7-c98b-4faf-8962-0a0e7274e18f\") " Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.536735 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5cc522d7-c98b-4faf-8962-0a0e7274e18f" (UID: "5cc522d7-c98b-4faf-8962-0a0e7274e18f"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.543055 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5cc522d7-c98b-4faf-8962-0a0e7274e18f" (UID: "5cc522d7-c98b-4faf-8962-0a0e7274e18f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.593609 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.597082 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5cc522d7-c98b-4faf-8962-0a0e7274e18f","Type":"ContainerDied","Data":"8650fe312467a799e2f5844c018dd850c0336b11ad1534db1f467e4ec6ffdf51"} Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.597795 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8650fe312467a799e2f5844c018dd850c0336b11ad1534db1f467e4ec6ffdf51" Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.638479 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.638508 4792 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5cc522d7-c98b-4faf-8962-0a0e7274e18f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:05 crc kubenswrapper[4792]: I0929 18:59:05.801398 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 18:59:06 crc kubenswrapper[4792]: I0929 18:59:06.144701 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-kmxdh" Sep 29 18:59:06 crc kubenswrapper[4792]: I0929 18:59:06.638367 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"850d3d01-4107-4025-8546-c95be2ce7578","Type":"ContainerStarted","Data":"c6f4e45738649bb5482edb518a8cf7bd29f39cc6e2e3ca2403d75f29ae3aac80"} Sep 29 18:59:07 crc kubenswrapper[4792]: I0929 18:59:07.678942 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"850d3d01-4107-4025-8546-c95be2ce7578","Type":"ContainerStarted","Data":"8ffc9e5b59b391160ba59d6555bc17981628814ac9672f4e597b2d8e582c8a43"} Sep 29 18:59:07 crc kubenswrapper[4792]: I0929 18:59:07.708478 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.708455516 podStartE2EDuration="3.708455516s" podCreationTimestamp="2025-09-29 18:59:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:59:07.693094887 +0000 UTC m=+159.686402293" watchObservedRunningTime="2025-09-29 18:59:07.708455516 +0000 UTC m=+159.701762902" Sep 29 18:59:08 crc kubenswrapper[4792]: I0929 18:59:08.715352 4792 generic.go:334] "Generic (PLEG): container finished" podID="850d3d01-4107-4025-8546-c95be2ce7578" 
containerID="8ffc9e5b59b391160ba59d6555bc17981628814ac9672f4e597b2d8e582c8a43" exitCode=0 Sep 29 18:59:08 crc kubenswrapper[4792]: I0929 18:59:08.715459 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"850d3d01-4107-4025-8546-c95be2ce7578","Type":"ContainerDied","Data":"8ffc9e5b59b391160ba59d6555bc17981628814ac9672f4e597b2d8e582c8a43"} Sep 29 18:59:10 crc kubenswrapper[4792]: I0929 18:59:10.185931 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 18:59:10 crc kubenswrapper[4792]: I0929 18:59:10.244718 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/850d3d01-4107-4025-8546-c95be2ce7578-kube-api-access\") pod \"850d3d01-4107-4025-8546-c95be2ce7578\" (UID: \"850d3d01-4107-4025-8546-c95be2ce7578\") " Sep 29 18:59:10 crc kubenswrapper[4792]: I0929 18:59:10.244865 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/850d3d01-4107-4025-8546-c95be2ce7578-kubelet-dir\") pod \"850d3d01-4107-4025-8546-c95be2ce7578\" (UID: \"850d3d01-4107-4025-8546-c95be2ce7578\") " Sep 29 18:59:10 crc kubenswrapper[4792]: I0929 18:59:10.245117 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/850d3d01-4107-4025-8546-c95be2ce7578-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "850d3d01-4107-4025-8546-c95be2ce7578" (UID: "850d3d01-4107-4025-8546-c95be2ce7578"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 18:59:10 crc kubenswrapper[4792]: I0929 18:59:10.287790 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/850d3d01-4107-4025-8546-c95be2ce7578-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "850d3d01-4107-4025-8546-c95be2ce7578" (UID: "850d3d01-4107-4025-8546-c95be2ce7578"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:59:10 crc kubenswrapper[4792]: I0929 18:59:10.345923 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/850d3d01-4107-4025-8546-c95be2ce7578-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:10 crc kubenswrapper[4792]: I0929 18:59:10.345959 4792 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/850d3d01-4107-4025-8546-c95be2ce7578-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:10 crc kubenswrapper[4792]: I0929 18:59:10.754975 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"850d3d01-4107-4025-8546-c95be2ce7578","Type":"ContainerDied","Data":"c6f4e45738649bb5482edb518a8cf7bd29f39cc6e2e3ca2403d75f29ae3aac80"} Sep 29 18:59:10 crc kubenswrapper[4792]: I0929 18:59:10.755035 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6f4e45738649bb5482edb518a8cf7bd29f39cc6e2e3ca2403d75f29ae3aac80" Sep 29 18:59:10 crc kubenswrapper[4792]: I0929 18:59:10.755106 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 18:59:11 crc kubenswrapper[4792]: I0929 18:59:11.095784 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-7phmq container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Sep 29 18:59:11 crc kubenswrapper[4792]: I0929 18:59:11.098657 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7phmq" podUID="662a9f05-f1a7-4d9d-8b42-daadfeddb122" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" Sep 29 18:59:11 crc kubenswrapper[4792]: I0929 18:59:11.095876 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-7phmq container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Sep 29 18:59:11 crc kubenswrapper[4792]: I0929 18:59:11.099046 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7phmq" podUID="662a9f05-f1a7-4d9d-8b42-daadfeddb122" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" Sep 29 18:59:11 crc kubenswrapper[4792]: I0929 18:59:11.121117 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:59:11 crc kubenswrapper[4792]: I0929 18:59:11.126602 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 18:59:11 crc kubenswrapper[4792]: I0929 18:59:11.960062 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:59:11 crc kubenswrapper[4792]: I0929 18:59:11.960139 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:59:15 crc kubenswrapper[4792]: I0929 18:59:15.479200 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:59:15 crc kubenswrapper[4792]: I0929 18:59:15.488554 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd292349-0e5a-4d80-b163-193aa43c98db-metrics-certs\") pod \"network-metrics-daemon-v5b2m\" (UID: \"fd292349-0e5a-4d80-b163-193aa43c98db\") " pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:59:15 crc kubenswrapper[4792]: I0929 18:59:15.645965 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-v5b2m" Sep 29 18:59:16 crc kubenswrapper[4792]: I0929 18:59:16.264936 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-v5b2m"] Sep 29 18:59:21 crc kubenswrapper[4792]: I0929 18:59:21.107312 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-7phmq" Sep 29 18:59:23 crc kubenswrapper[4792]: I0929 18:59:23.000951 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 18:59:24 crc kubenswrapper[4792]: I0929 18:59:24.913554 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" event={"ID":"fd292349-0e5a-4d80-b163-193aa43c98db","Type":"ContainerStarted","Data":"3edc37c1a371f0dca1eacd61640e09b0af04ae18eda874e397e5f49371fda24c"} Sep 29 18:59:30 crc kubenswrapper[4792]: I0929 18:59:30.423692 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bfnfv" Sep 29 18:59:34 crc kubenswrapper[4792]: E0929 18:59:34.346904 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 29 18:59:34 crc kubenswrapper[4792]: E0929 18:59:34.347646 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cvplj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7kmj2_openshift-marketplace(b2e3cd89-1359-4a18-ade6-05cba0c68e70): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 18:59:34 crc kubenswrapper[4792]: E0929 18:59:34.349115 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7kmj2" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" Sep 29 18:59:35 crc kubenswrapper[4792]: E0929 18:59:35.769006 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-7kmj2" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" Sep 29 18:59:37 crc kubenswrapper[4792]: I0929 18:59:37.040710 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 18:59:37 crc kubenswrapper[4792]: E0929 18:59:37.066560 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 18:59:37 crc kubenswrapper[4792]: E0929 18:59:37.066724 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qjgzc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-grgxc_openshift-marketplace(51c4e50f-73c5-485d-a704-b0454f9cfde5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 18:59:37 crc kubenswrapper[4792]: E0929 18:59:37.067899 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-grgxc" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" Sep 29 18:59:37 crc kubenswrapper[4792]: E0929 18:59:37.215502 4792 log.go:32] 
"PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 29 18:59:37 crc kubenswrapper[4792]: E0929 18:59:37.216088 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2gmfg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-wmjgp_openshift-marketplace(c4e9ba50-617f-4f99-9430-ee6fb3d21b8e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 18:59:37 crc kubenswrapper[4792]: E0929 18:59:37.217643 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-wmjgp" podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" Sep 29 18:59:38 crc kubenswrapper[4792]: I0929 18:59:38.006412 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-558ts" event={"ID":"103cb699-909d-400e-97cc-872f7769a806","Type":"ContainerStarted","Data":"a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784"} Sep 29 18:59:38 crc kubenswrapper[4792]: I0929 18:59:38.008294 4792 generic.go:334] "Generic (PLEG): container finished" podID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerID="a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818" exitCode=0 Sep 29 18:59:38 crc kubenswrapper[4792]: I0929 18:59:38.008439 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rwhnq" event={"ID":"f9df84a0-6eb0-415a-ae29-c93ba496f855","Type":"ContainerDied","Data":"a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818"} Sep 29 18:59:38 crc kubenswrapper[4792]: I0929 18:59:38.010199 4792 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/certified-operators-f9k8v" event={"ID":"82bcbfe6-9f95-4749-80a0-81b82a1b78d9","Type":"ContainerStarted","Data":"aaaec94afd5e18fefb07f09543677d69d256a3fd756bd796b4155d5c5b9e1fa5"} Sep 29 18:59:38 crc kubenswrapper[4792]: I0929 18:59:38.014957 4792 generic.go:334] "Generic (PLEG): container finished" podID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerID="e6abf063928795bd8228eadfdeef10b45d8b13c61bd2dec5b8495a4c0f2d087b" exitCode=0 Sep 29 18:59:38 crc kubenswrapper[4792]: I0929 18:59:38.015105 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llhzn" event={"ID":"de0a9077-e8b8-4b2c-bfdf-4e965627f520","Type":"ContainerDied","Data":"e6abf063928795bd8228eadfdeef10b45d8b13c61bd2dec5b8495a4c0f2d087b"} Sep 29 18:59:38 crc kubenswrapper[4792]: I0929 18:59:38.017561 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" event={"ID":"fd292349-0e5a-4d80-b163-193aa43c98db","Type":"ContainerStarted","Data":"5450bebc202ee4c271ead3d271163a447c148c16befbc5189d03001d4427e659"} Sep 29 18:59:38 crc kubenswrapper[4792]: I0929 18:59:38.019762 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr4zx" event={"ID":"71994559-92a6-4331-bbbb-04e9d2498c9b","Type":"ContainerStarted","Data":"227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e"} Sep 29 18:59:38 crc kubenswrapper[4792]: E0929 18:59:38.022773 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-grgxc" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" Sep 29 18:59:38 crc kubenswrapper[4792]: E0929 18:59:38.023416 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-wmjgp" podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" Sep 29 18:59:39 crc kubenswrapper[4792]: I0929 18:59:39.037936 4792 generic.go:334] "Generic (PLEG): container finished" podID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerID="227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e" exitCode=0 Sep 29 18:59:39 crc kubenswrapper[4792]: I0929 18:59:39.038047 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr4zx" event={"ID":"71994559-92a6-4331-bbbb-04e9d2498c9b","Type":"ContainerDied","Data":"227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e"} Sep 29 18:59:39 crc kubenswrapper[4792]: I0929 18:59:39.054507 4792 generic.go:334] "Generic (PLEG): container finished" podID="103cb699-909d-400e-97cc-872f7769a806" containerID="a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784" exitCode=0 Sep 29 18:59:39 crc kubenswrapper[4792]: I0929 18:59:39.054596 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-558ts" event={"ID":"103cb699-909d-400e-97cc-872f7769a806","Type":"ContainerDied","Data":"a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784"} Sep 29 18:59:39 crc kubenswrapper[4792]: I0929 18:59:39.062760 4792 generic.go:334] "Generic (PLEG): container finished" podID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" 
containerID="aaaec94afd5e18fefb07f09543677d69d256a3fd756bd796b4155d5c5b9e1fa5" exitCode=0 Sep 29 18:59:39 crc kubenswrapper[4792]: I0929 18:59:39.062818 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9k8v" event={"ID":"82bcbfe6-9f95-4749-80a0-81b82a1b78d9","Type":"ContainerDied","Data":"aaaec94afd5e18fefb07f09543677d69d256a3fd756bd796b4155d5c5b9e1fa5"} Sep 29 18:59:39 crc kubenswrapper[4792]: I0929 18:59:39.071912 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-v5b2m" event={"ID":"fd292349-0e5a-4d80-b163-193aa43c98db","Type":"ContainerStarted","Data":"bccbdd11ba1b57c126af369f6e05e6635c1cf6b7be82486299cbd701365ce027"} Sep 29 18:59:39 crc kubenswrapper[4792]: I0929 18:59:39.137976 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-v5b2m" podStartSLOduration=166.136741476 podStartE2EDuration="2m46.136741476s" podCreationTimestamp="2025-09-29 18:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:59:39.1336005 +0000 UTC m=+191.126907936" watchObservedRunningTime="2025-09-29 18:59:39.136741476 +0000 UTC m=+191.130048882" Sep 29 18:59:41 crc kubenswrapper[4792]: I0929 18:59:41.959838 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:59:41 crc kubenswrapper[4792]: I0929 18:59:41.960618 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:59:42 crc kubenswrapper[4792]: I0929 18:59:42.101703 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rwhnq" event={"ID":"f9df84a0-6eb0-415a-ae29-c93ba496f855","Type":"ContainerStarted","Data":"91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3"} Sep 29 18:59:42 crc kubenswrapper[4792]: I0929 18:59:42.122385 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rwhnq" podStartSLOduration=4.472751636 podStartE2EDuration="42.122365929s" podCreationTimestamp="2025-09-29 18:59:00 +0000 UTC" firstStartedPulling="2025-09-29 18:59:03.407321916 +0000 UTC m=+155.400629312" lastFinishedPulling="2025-09-29 18:59:41.056936209 +0000 UTC m=+193.050243605" observedRunningTime="2025-09-29 18:59:42.12019317 +0000 UTC m=+194.113500596" watchObservedRunningTime="2025-09-29 18:59:42.122365929 +0000 UTC m=+194.115673315" Sep 29 18:59:43 crc kubenswrapper[4792]: I0929 18:59:43.109836 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llhzn" event={"ID":"de0a9077-e8b8-4b2c-bfdf-4e965627f520","Type":"ContainerStarted","Data":"0a2bf2229314f16f943c0314ec0dbfbe7261126e2040610b1c8ef94769f07a47"} Sep 29 18:59:45 crc kubenswrapper[4792]: I0929 18:59:45.119304 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr4zx" 
event={"ID":"71994559-92a6-4331-bbbb-04e9d2498c9b","Type":"ContainerStarted","Data":"0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab"} Sep 29 18:59:45 crc kubenswrapper[4792]: I0929 18:59:45.122632 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-558ts" event={"ID":"103cb699-909d-400e-97cc-872f7769a806","Type":"ContainerStarted","Data":"06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a"} Sep 29 18:59:45 crc kubenswrapper[4792]: I0929 18:59:45.124625 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9k8v" event={"ID":"82bcbfe6-9f95-4749-80a0-81b82a1b78d9","Type":"ContainerStarted","Data":"86fe569b98b9da981e5c8b08bb36f61867ddaf26197d95b69c92c3af5fdfa8fa"} Sep 29 18:59:45 crc kubenswrapper[4792]: I0929 18:59:45.146935 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zr4zx" podStartSLOduration=3.099403354 podStartE2EDuration="44.146919054s" podCreationTimestamp="2025-09-29 18:59:01 +0000 UTC" firstStartedPulling="2025-09-29 18:59:03.425214474 +0000 UTC m=+155.418521870" lastFinishedPulling="2025-09-29 18:59:44.472730174 +0000 UTC m=+196.466037570" observedRunningTime="2025-09-29 18:59:45.145214617 +0000 UTC m=+197.138522013" watchObservedRunningTime="2025-09-29 18:59:45.146919054 +0000 UTC m=+197.140226450" Sep 29 18:59:45 crc kubenswrapper[4792]: I0929 18:59:45.147572 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-llhzn" podStartSLOduration=7.047698054 podStartE2EDuration="47.147567872s" podCreationTimestamp="2025-09-29 18:58:58 +0000 UTC" firstStartedPulling="2025-09-29 18:59:02.300863097 +0000 UTC m=+154.294170493" lastFinishedPulling="2025-09-29 18:59:42.400732895 +0000 UTC m=+194.394040311" observedRunningTime="2025-09-29 18:59:44.139222796 +0000 UTC m=+196.132530202" watchObservedRunningTime="2025-09-29 18:59:45.147567872 +0000 UTC m=+197.140875268" Sep 29 18:59:45 crc kubenswrapper[4792]: I0929 18:59:45.162559 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-f9k8v" podStartSLOduration=5.5006550480000005 podStartE2EDuration="47.162536039s" podCreationTimestamp="2025-09-29 18:58:58 +0000 UTC" firstStartedPulling="2025-09-29 18:59:02.283483243 +0000 UTC m=+154.276790639" lastFinishedPulling="2025-09-29 18:59:43.945364214 +0000 UTC m=+195.938671630" observedRunningTime="2025-09-29 18:59:45.160696149 +0000 UTC m=+197.154003555" watchObservedRunningTime="2025-09-29 18:59:45.162536039 +0000 UTC m=+197.155843435" Sep 29 18:59:45 crc kubenswrapper[4792]: I0929 18:59:45.184540 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-558ts" podStartSLOduration=3.264068479 podStartE2EDuration="43.184518128s" podCreationTimestamp="2025-09-29 18:59:02 +0000 UTC" firstStartedPulling="2025-09-29 18:59:04.53703875 +0000 UTC m=+156.530346146" lastFinishedPulling="2025-09-29 18:59:44.457488399 +0000 UTC m=+196.450795795" observedRunningTime="2025-09-29 18:59:45.180654273 +0000 UTC m=+197.173961669" watchObservedRunningTime="2025-09-29 18:59:45.184518128 +0000 UTC m=+197.177825524" Sep 29 18:59:48 crc kubenswrapper[4792]: I0929 18:59:48.765984 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:59:48 crc 
kubenswrapper[4792]: I0929 18:59:48.767704 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:59:49 crc kubenswrapper[4792]: I0929 18:59:49.393143 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:59:49 crc kubenswrapper[4792]: I0929 18:59:49.394650 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:59:49 crc kubenswrapper[4792]: I0929 18:59:49.524001 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:59:49 crc kubenswrapper[4792]: I0929 18:59:49.525294 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:59:50 crc kubenswrapper[4792]: I0929 18:59:50.183990 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:59:50 crc kubenswrapper[4792]: I0929 18:59:50.188494 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-llhzn" Sep 29 18:59:50 crc kubenswrapper[4792]: I0929 18:59:50.622079 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:50 crc kubenswrapper[4792]: I0929 18:59:50.622132 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:50 crc kubenswrapper[4792]: I0929 18:59:50.666924 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:51 crc kubenswrapper[4792]: I0929 18:59:51.195218 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 18:59:52 crc kubenswrapper[4792]: I0929 18:59:52.107544 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:52 crc kubenswrapper[4792]: I0929 18:59:52.107588 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:52 crc kubenswrapper[4792]: I0929 18:59:52.149558 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:52 crc kubenswrapper[4792]: I0929 18:59:52.207657 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 18:59:52 crc kubenswrapper[4792]: I0929 18:59:52.260368 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-f9k8v"] Sep 29 18:59:52 crc kubenswrapper[4792]: I0929 18:59:52.261248 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-f9k8v" podUID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" containerName="registry-server" containerID="cri-o://86fe569b98b9da981e5c8b08bb36f61867ddaf26197d95b69c92c3af5fdfa8fa" gracePeriod=2 Sep 29 18:59:52 crc kubenswrapper[4792]: I0929 18:59:52.525553 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-558ts" Sep 29 
18:59:52 crc kubenswrapper[4792]: I0929 18:59:52.526332 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:52 crc kubenswrapper[4792]: I0929 18:59:52.565477 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.176479 4792 generic.go:334] "Generic (PLEG): container finished" podID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" containerID="86fe569b98b9da981e5c8b08bb36f61867ddaf26197d95b69c92c3af5fdfa8fa" exitCode=0 Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.176591 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9k8v" event={"ID":"82bcbfe6-9f95-4749-80a0-81b82a1b78d9","Type":"ContainerDied","Data":"86fe569b98b9da981e5c8b08bb36f61867ddaf26197d95b69c92c3af5fdfa8fa"} Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.216403 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.274489 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.320789 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndfj9\" (UniqueName: \"kubernetes.io/projected/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-kube-api-access-ndfj9\") pod \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.321168 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-catalog-content\") pod \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.321517 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-utilities\") pod \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\" (UID: \"82bcbfe6-9f95-4749-80a0-81b82a1b78d9\") " Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.322989 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-utilities" (OuterVolumeSpecName: "utilities") pod "82bcbfe6-9f95-4749-80a0-81b82a1b78d9" (UID: "82bcbfe6-9f95-4749-80a0-81b82a1b78d9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.328670 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-kube-api-access-ndfj9" (OuterVolumeSpecName: "kube-api-access-ndfj9") pod "82bcbfe6-9f95-4749-80a0-81b82a1b78d9" (UID: "82bcbfe6-9f95-4749-80a0-81b82a1b78d9"). InnerVolumeSpecName "kube-api-access-ndfj9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.371237 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "82bcbfe6-9f95-4749-80a0-81b82a1b78d9" (UID: "82bcbfe6-9f95-4749-80a0-81b82a1b78d9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.423316 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndfj9\" (UniqueName: \"kubernetes.io/projected/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-kube-api-access-ndfj9\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.423367 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:53 crc kubenswrapper[4792]: I0929 18:59:53.423386 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82bcbfe6-9f95-4749-80a0-81b82a1b78d9-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:54 crc kubenswrapper[4792]: I0929 18:59:54.183179 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-f9k8v" Sep 29 18:59:54 crc kubenswrapper[4792]: I0929 18:59:54.183727 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9k8v" event={"ID":"82bcbfe6-9f95-4749-80a0-81b82a1b78d9","Type":"ContainerDied","Data":"1ffb287befc01f4843fecb7c9e3f99e0de835f233c21713628af9cc70dfa60c5"} Sep 29 18:59:54 crc kubenswrapper[4792]: I0929 18:59:54.183896 4792 scope.go:117] "RemoveContainer" containerID="86fe569b98b9da981e5c8b08bb36f61867ddaf26197d95b69c92c3af5fdfa8fa" Sep 29 18:59:54 crc kubenswrapper[4792]: I0929 18:59:54.219215 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-f9k8v"] Sep 29 18:59:54 crc kubenswrapper[4792]: I0929 18:59:54.222581 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-f9k8v"] Sep 29 18:59:54 crc kubenswrapper[4792]: I0929 18:59:54.530595 4792 scope.go:117] "RemoveContainer" containerID="aaaec94afd5e18fefb07f09543677d69d256a3fd756bd796b4155d5c5b9e1fa5" Sep 29 18:59:54 crc kubenswrapper[4792]: I0929 18:59:54.547167 4792 scope.go:117] "RemoveContainer" containerID="6e2ae06982081f4e1198b2b5861bf37f09b3bd8e3825f52c0ced7ab07954b8c4" Sep 29 18:59:55 crc kubenswrapper[4792]: I0929 18:59:55.022574 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" path="/var/lib/kubelet/pods/82bcbfe6-9f95-4749-80a0-81b82a1b78d9/volumes" Sep 29 18:59:55 crc kubenswrapper[4792]: I0929 18:59:55.192243 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmjgp" event={"ID":"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e","Type":"ContainerStarted","Data":"6f84229f16b6c209e965485ad6e16aff40cca6f6b74e880250e15755dc87b664"} Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.213159 4792 generic.go:334] "Generic (PLEG): container finished" podID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerID="7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0" exitCode=0 Sep 29 
18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.213285 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7kmj2" event={"ID":"b2e3cd89-1359-4a18-ade6-05cba0c68e70","Type":"ContainerDied","Data":"7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0"} Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.216276 4792 generic.go:334] "Generic (PLEG): container finished" podID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerID="d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639" exitCode=0 Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.216336 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grgxc" event={"ID":"51c4e50f-73c5-485d-a704-b0454f9cfde5","Type":"ContainerDied","Data":"d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639"} Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.222869 4792 generic.go:334] "Generic (PLEG): container finished" podID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" containerID="6f84229f16b6c209e965485ad6e16aff40cca6f6b74e880250e15755dc87b664" exitCode=0 Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.222940 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmjgp" event={"ID":"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e","Type":"ContainerDied","Data":"6f84229f16b6c209e965485ad6e16aff40cca6f6b74e880250e15755dc87b664"} Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.461195 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-558ts"] Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.462067 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-558ts" podUID="103cb699-909d-400e-97cc-872f7769a806" containerName="registry-server" containerID="cri-o://06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a" gracePeriod=2 Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.860714 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.973349 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-catalog-content\") pod \"103cb699-909d-400e-97cc-872f7769a806\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.973454 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-utilities\") pod \"103cb699-909d-400e-97cc-872f7769a806\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.973582 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wtt7\" (UniqueName: \"kubernetes.io/projected/103cb699-909d-400e-97cc-872f7769a806-kube-api-access-6wtt7\") pod \"103cb699-909d-400e-97cc-872f7769a806\" (UID: \"103cb699-909d-400e-97cc-872f7769a806\") " Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.975891 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-utilities" (OuterVolumeSpecName: "utilities") pod "103cb699-909d-400e-97cc-872f7769a806" (UID: "103cb699-909d-400e-97cc-872f7769a806"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:59:56 crc kubenswrapper[4792]: I0929 18:59:56.981773 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/103cb699-909d-400e-97cc-872f7769a806-kube-api-access-6wtt7" (OuterVolumeSpecName: "kube-api-access-6wtt7") pod "103cb699-909d-400e-97cc-872f7769a806" (UID: "103cb699-909d-400e-97cc-872f7769a806"). InnerVolumeSpecName "kube-api-access-6wtt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.076793 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.076829 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wtt7\" (UniqueName: \"kubernetes.io/projected/103cb699-909d-400e-97cc-872f7769a806-kube-api-access-6wtt7\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.078938 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "103cb699-909d-400e-97cc-872f7769a806" (UID: "103cb699-909d-400e-97cc-872f7769a806"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.178371 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/103cb699-909d-400e-97cc-872f7769a806-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.231011 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmjgp" event={"ID":"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e","Type":"ContainerStarted","Data":"5a961a166e2e652bd3f74ea6ad271d1dbff21f3a6f515108e484304ade0d2048"} Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.233372 4792 generic.go:334] "Generic (PLEG): container finished" podID="103cb699-909d-400e-97cc-872f7769a806" containerID="06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a" exitCode=0 Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.233502 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-558ts" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.233538 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-558ts" event={"ID":"103cb699-909d-400e-97cc-872f7769a806","Type":"ContainerDied","Data":"06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a"} Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.233741 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-558ts" event={"ID":"103cb699-909d-400e-97cc-872f7769a806","Type":"ContainerDied","Data":"75f1d17a988445f12301e96135d1417bda99b4fcdfeaf5fc0b1a9089dc61a7ea"} Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.233763 4792 scope.go:117] "RemoveContainer" containerID="06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.237429 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7kmj2" event={"ID":"b2e3cd89-1359-4a18-ade6-05cba0c68e70","Type":"ContainerStarted","Data":"4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a"} Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.241073 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grgxc" event={"ID":"51c4e50f-73c5-485d-a704-b0454f9cfde5","Type":"ContainerStarted","Data":"8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085"} Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.253550 4792 scope.go:117] "RemoveContainer" containerID="a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.257785 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wmjgp" podStartSLOduration=4.466033607 podStartE2EDuration="59.257760766s" podCreationTimestamp="2025-09-29 18:58:58 +0000 UTC" firstStartedPulling="2025-09-29 18:59:02.21362803 +0000 UTC m=+154.206935426" lastFinishedPulling="2025-09-29 18:59:57.005355189 +0000 UTC m=+208.998662585" observedRunningTime="2025-09-29 18:59:57.256292646 +0000 UTC m=+209.249600062" watchObservedRunningTime="2025-09-29 18:59:57.257760766 +0000 UTC m=+209.251068162" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.277274 4792 scope.go:117] "RemoveContainer" containerID="d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618" 
Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.296886 4792 scope.go:117] "RemoveContainer" containerID="06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a" Sep 29 18:59:57 crc kubenswrapper[4792]: E0929 18:59:57.297480 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a\": container with ID starting with 06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a not found: ID does not exist" containerID="06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.297567 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a"} err="failed to get container status \"06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a\": rpc error: code = NotFound desc = could not find container \"06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a\": container with ID starting with 06532d5843d34e8d1f8c63c5dab6fe837e1ca7d3cf7ce4d337b5a87f243bb42a not found: ID does not exist" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.297641 4792 scope.go:117] "RemoveContainer" containerID="a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.301781 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-grgxc" podStartSLOduration=4.860214388 podStartE2EDuration="59.301757205s" podCreationTimestamp="2025-09-29 18:58:58 +0000 UTC" firstStartedPulling="2025-09-29 18:59:02.207222465 +0000 UTC m=+154.200529861" lastFinishedPulling="2025-09-29 18:59:56.648765282 +0000 UTC m=+208.642072678" observedRunningTime="2025-09-29 18:59:57.300009728 +0000 UTC m=+209.293317144" watchObservedRunningTime="2025-09-29 18:59:57.301757205 +0000 UTC m=+209.295064601" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.302264 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7kmj2" podStartSLOduration=2.846090163 podStartE2EDuration="57.302257579s" podCreationTimestamp="2025-09-29 18:59:00 +0000 UTC" firstStartedPulling="2025-09-29 18:59:02.232708079 +0000 UTC m=+154.226015475" lastFinishedPulling="2025-09-29 18:59:56.688875495 +0000 UTC m=+208.682182891" observedRunningTime="2025-09-29 18:59:57.277769152 +0000 UTC m=+209.271076558" watchObservedRunningTime="2025-09-29 18:59:57.302257579 +0000 UTC m=+209.295564985" Sep 29 18:59:57 crc kubenswrapper[4792]: E0929 18:59:57.305404 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784\": container with ID starting with a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784 not found: ID does not exist" containerID="a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.305441 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784"} err="failed to get container status \"a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784\": rpc error: code = NotFound desc = could not find container 
\"a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784\": container with ID starting with a7a2f0823541a22b8c9858d36d9abb8bef54e0b9b8ecb9512fb8d42a7e17b784 not found: ID does not exist" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.305464 4792 scope.go:117] "RemoveContainer" containerID="d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618" Sep 29 18:59:57 crc kubenswrapper[4792]: E0929 18:59:57.305868 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618\": container with ID starting with d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618 not found: ID does not exist" containerID="d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.305896 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618"} err="failed to get container status \"d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618\": rpc error: code = NotFound desc = could not find container \"d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618\": container with ID starting with d65eea228d99012fb9647d3fdcff0cb6b1fdf83bc13c5ac9f43ec774b6b1a618 not found: ID does not exist" Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.317789 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-558ts"] Sep 29 18:59:57 crc kubenswrapper[4792]: I0929 18:59:57.321638 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-558ts"] Sep 29 18:59:59 crc kubenswrapper[4792]: I0929 18:59:59.023059 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="103cb699-909d-400e-97cc-872f7769a806" path="/var/lib/kubelet/pods/103cb699-909d-400e-97cc-872f7769a806/volumes" Sep 29 18:59:59 crc kubenswrapper[4792]: I0929 18:59:59.140919 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:59:59 crc kubenswrapper[4792]: I0929 18:59:59.140982 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:59:59 crc kubenswrapper[4792]: I0929 18:59:59.182802 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-grgxc" Sep 29 18:59:59 crc kubenswrapper[4792]: I0929 18:59:59.398711 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:59:59 crc kubenswrapper[4792]: I0929 18:59:59.398765 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 18:59:59 crc kubenswrapper[4792]: I0929 18:59:59.454004 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140440 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng"] Sep 29 19:00:00 crc kubenswrapper[4792]: E0929 19:00:00.140654 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cc522d7-c98b-4faf-8962-0a0e7274e18f" containerName="pruner" Sep 
29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140667 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cc522d7-c98b-4faf-8962-0a0e7274e18f" containerName="pruner" Sep 29 19:00:00 crc kubenswrapper[4792]: E0929 19:00:00.140676 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="103cb699-909d-400e-97cc-872f7769a806" containerName="registry-server" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140682 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="103cb699-909d-400e-97cc-872f7769a806" containerName="registry-server" Sep 29 19:00:00 crc kubenswrapper[4792]: E0929 19:00:00.140691 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="103cb699-909d-400e-97cc-872f7769a806" containerName="extract-utilities" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140697 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="103cb699-909d-400e-97cc-872f7769a806" containerName="extract-utilities" Sep 29 19:00:00 crc kubenswrapper[4792]: E0929 19:00:00.140707 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="103cb699-909d-400e-97cc-872f7769a806" containerName="extract-content" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140713 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="103cb699-909d-400e-97cc-872f7769a806" containerName="extract-content" Sep 29 19:00:00 crc kubenswrapper[4792]: E0929 19:00:00.140725 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="850d3d01-4107-4025-8546-c95be2ce7578" containerName="pruner" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140730 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="850d3d01-4107-4025-8546-c95be2ce7578" containerName="pruner" Sep 29 19:00:00 crc kubenswrapper[4792]: E0929 19:00:00.140739 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" containerName="extract-content" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140746 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" containerName="extract-content" Sep 29 19:00:00 crc kubenswrapper[4792]: E0929 19:00:00.140762 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" containerName="extract-utilities" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140768 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" containerName="extract-utilities" Sep 29 19:00:00 crc kubenswrapper[4792]: E0929 19:00:00.140774 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" containerName="registry-server" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140780 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" containerName="registry-server" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140886 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="103cb699-909d-400e-97cc-872f7769a806" containerName="registry-server" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140899 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="850d3d01-4107-4025-8546-c95be2ce7578" containerName="pruner" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.140910 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cc522d7-c98b-4faf-8962-0a0e7274e18f" containerName="pruner" Sep 29 19:00:00 
crc kubenswrapper[4792]: I0929 19:00:00.140919 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="82bcbfe6-9f95-4749-80a0-81b82a1b78d9" containerName="registry-server" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.141308 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.143681 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.145302 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.157676 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng"] Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.219725 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a627c392-8375-4d2b-be05-3b7b6b697664-secret-volume\") pod \"collect-profiles-29319540-7hrng\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.219801 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47v8n\" (UniqueName: \"kubernetes.io/projected/a627c392-8375-4d2b-be05-3b7b6b697664-kube-api-access-47v8n\") pod \"collect-profiles-29319540-7hrng\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.219886 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a627c392-8375-4d2b-be05-3b7b6b697664-config-volume\") pod \"collect-profiles-29319540-7hrng\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.322199 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a627c392-8375-4d2b-be05-3b7b6b697664-secret-volume\") pod \"collect-profiles-29319540-7hrng\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.322285 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47v8n\" (UniqueName: \"kubernetes.io/projected/a627c392-8375-4d2b-be05-3b7b6b697664-kube-api-access-47v8n\") pod \"collect-profiles-29319540-7hrng\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.322316 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a627c392-8375-4d2b-be05-3b7b6b697664-config-volume\") pod \"collect-profiles-29319540-7hrng\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.323504 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a627c392-8375-4d2b-be05-3b7b6b697664-config-volume\") pod \"collect-profiles-29319540-7hrng\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.330878 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a627c392-8375-4d2b-be05-3b7b6b697664-secret-volume\") pod \"collect-profiles-29319540-7hrng\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.344304 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47v8n\" (UniqueName: \"kubernetes.io/projected/a627c392-8375-4d2b-be05-3b7b6b697664-kube-api-access-47v8n\") pod \"collect-profiles-29319540-7hrng\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.461839 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:00 crc kubenswrapper[4792]: I0929 19:00:00.926640 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng"] Sep 29 19:00:00 crc kubenswrapper[4792]: W0929 19:00:00.937772 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda627c392_8375_4d2b_be05_3b7b6b697664.slice/crio-5c6ab101208f49f9b96ffb05fb527dbbfa621e33dfe40ad862a2331fa0fc5413 WatchSource:0}: Error finding container 5c6ab101208f49f9b96ffb05fb527dbbfa621e33dfe40ad862a2331fa0fc5413: Status 404 returned error can't find the container with id 5c6ab101208f49f9b96ffb05fb527dbbfa621e33dfe40ad862a2331fa0fc5413 Sep 29 19:00:01 crc kubenswrapper[4792]: I0929 19:00:01.012656 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 19:00:01 crc kubenswrapper[4792]: I0929 19:00:01.012708 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 19:00:01 crc kubenswrapper[4792]: I0929 19:00:01.056052 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 19:00:01 crc kubenswrapper[4792]: I0929 19:00:01.275950 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" event={"ID":"a627c392-8375-4d2b-be05-3b7b6b697664","Type":"ContainerStarted","Data":"5c6ab101208f49f9b96ffb05fb527dbbfa621e33dfe40ad862a2331fa0fc5413"} Sep 29 19:00:01 crc kubenswrapper[4792]: I0929 19:00:01.315457 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 19:00:02 crc kubenswrapper[4792]: I0929 19:00:02.283280 4792 generic.go:334] "Generic (PLEG): container finished" podID="a627c392-8375-4d2b-be05-3b7b6b697664" 
containerID="439ec775c6478dba6c71e3ec1b2e23b47207eb0f683df24624f9027115c2fd81" exitCode=0 Sep 29 19:00:02 crc kubenswrapper[4792]: I0929 19:00:02.283387 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" event={"ID":"a627c392-8375-4d2b-be05-3b7b6b697664","Type":"ContainerDied","Data":"439ec775c6478dba6c71e3ec1b2e23b47207eb0f683df24624f9027115c2fd81"} Sep 29 19:00:02 crc kubenswrapper[4792]: I0929 19:00:02.659474 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7kmj2"] Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.289648 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7kmj2" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerName="registry-server" containerID="cri-o://4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a" gracePeriod=2 Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.527738 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.692758 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a627c392-8375-4d2b-be05-3b7b6b697664-secret-volume\") pod \"a627c392-8375-4d2b-be05-3b7b6b697664\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.692809 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a627c392-8375-4d2b-be05-3b7b6b697664-config-volume\") pod \"a627c392-8375-4d2b-be05-3b7b6b697664\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.692954 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47v8n\" (UniqueName: \"kubernetes.io/projected/a627c392-8375-4d2b-be05-3b7b6b697664-kube-api-access-47v8n\") pod \"a627c392-8375-4d2b-be05-3b7b6b697664\" (UID: \"a627c392-8375-4d2b-be05-3b7b6b697664\") " Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.694577 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a627c392-8375-4d2b-be05-3b7b6b697664-config-volume" (OuterVolumeSpecName: "config-volume") pod "a627c392-8375-4d2b-be05-3b7b6b697664" (UID: "a627c392-8375-4d2b-be05-3b7b6b697664"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.708711 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a627c392-8375-4d2b-be05-3b7b6b697664-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a627c392-8375-4d2b-be05-3b7b6b697664" (UID: "a627c392-8375-4d2b-be05-3b7b6b697664"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.712048 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a627c392-8375-4d2b-be05-3b7b6b697664-kube-api-access-47v8n" (OuterVolumeSpecName: "kube-api-access-47v8n") pod "a627c392-8375-4d2b-be05-3b7b6b697664" (UID: "a627c392-8375-4d2b-be05-3b7b6b697664"). InnerVolumeSpecName "kube-api-access-47v8n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.756450 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.794549 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47v8n\" (UniqueName: \"kubernetes.io/projected/a627c392-8375-4d2b-be05-3b7b6b697664-kube-api-access-47v8n\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.794578 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a627c392-8375-4d2b-be05-3b7b6b697664-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.794588 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a627c392-8375-4d2b-be05-3b7b6b697664-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.895699 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvplj\" (UniqueName: \"kubernetes.io/projected/b2e3cd89-1359-4a18-ade6-05cba0c68e70-kube-api-access-cvplj\") pod \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.895869 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-utilities\") pod \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.895919 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-catalog-content\") pod \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\" (UID: \"b2e3cd89-1359-4a18-ade6-05cba0c68e70\") " Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.896796 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-utilities" (OuterVolumeSpecName: "utilities") pod "b2e3cd89-1359-4a18-ade6-05cba0c68e70" (UID: "b2e3cd89-1359-4a18-ade6-05cba0c68e70"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.898443 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2e3cd89-1359-4a18-ade6-05cba0c68e70-kube-api-access-cvplj" (OuterVolumeSpecName: "kube-api-access-cvplj") pod "b2e3cd89-1359-4a18-ade6-05cba0c68e70" (UID: "b2e3cd89-1359-4a18-ade6-05cba0c68e70"). InnerVolumeSpecName "kube-api-access-cvplj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.912633 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2e3cd89-1359-4a18-ade6-05cba0c68e70" (UID: "b2e3cd89-1359-4a18-ade6-05cba0c68e70"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.997980 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvplj\" (UniqueName: \"kubernetes.io/projected/b2e3cd89-1359-4a18-ade6-05cba0c68e70-kube-api-access-cvplj\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.998026 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:03 crc kubenswrapper[4792]: I0929 19:00:03.998039 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2e3cd89-1359-4a18-ade6-05cba0c68e70-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.308338 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" event={"ID":"a627c392-8375-4d2b-be05-3b7b6b697664","Type":"ContainerDied","Data":"5c6ab101208f49f9b96ffb05fb527dbbfa621e33dfe40ad862a2331fa0fc5413"} Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.308373 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c6ab101208f49f9b96ffb05fb527dbbfa621e33dfe40ad862a2331fa0fc5413" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.309348 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.312380 4792 generic.go:334] "Generic (PLEG): container finished" podID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerID="4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a" exitCode=0 Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.312420 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7kmj2" event={"ID":"b2e3cd89-1359-4a18-ade6-05cba0c68e70","Type":"ContainerDied","Data":"4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a"} Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.312438 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7kmj2" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.312447 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7kmj2" event={"ID":"b2e3cd89-1359-4a18-ade6-05cba0c68e70","Type":"ContainerDied","Data":"26c87749a50f21fa161637662e76dd927114119bfb872e64fb3c4543ca9e51a4"} Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.312465 4792 scope.go:117] "RemoveContainer" containerID="4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.330094 4792 scope.go:117] "RemoveContainer" containerID="7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.337129 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7kmj2"] Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.341396 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7kmj2"] Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.352958 4792 scope.go:117] "RemoveContainer" containerID="cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.367995 4792 scope.go:117] "RemoveContainer" containerID="4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a" Sep 29 19:00:04 crc kubenswrapper[4792]: E0929 19:00:04.368315 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a\": container with ID starting with 4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a not found: ID does not exist" containerID="4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.368354 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a"} err="failed to get container status \"4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a\": rpc error: code = NotFound desc = could not find container \"4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a\": container with ID starting with 4e63c8d14698ad77f2572062f7da73842bc372475aca7e44a3fa5fc59e26f79a not found: ID does not exist" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.368381 4792 scope.go:117] "RemoveContainer" containerID="7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0" Sep 29 19:00:04 crc kubenswrapper[4792]: E0929 19:00:04.368587 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0\": container with ID starting with 7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0 not found: ID does not exist" containerID="7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.368612 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0"} err="failed to get container status \"7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0\": rpc error: code = NotFound desc = could not find 
container \"7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0\": container with ID starting with 7c485859f74999aacd17b38df972404a409e4a8f03dff3c4306d78cb325201b0 not found: ID does not exist" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.368629 4792 scope.go:117] "RemoveContainer" containerID="cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5" Sep 29 19:00:04 crc kubenswrapper[4792]: E0929 19:00:04.368806 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5\": container with ID starting with cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5 not found: ID does not exist" containerID="cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5" Sep 29 19:00:04 crc kubenswrapper[4792]: I0929 19:00:04.368828 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5"} err="failed to get container status \"cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5\": rpc error: code = NotFound desc = could not find container \"cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5\": container with ID starting with cdfe5618632f54aaf2044d09d31cc1ab863212fc51298b2b74d5df87b8e7d0a5 not found: ID does not exist" Sep 29 19:00:05 crc kubenswrapper[4792]: I0929 19:00:05.021519 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" path="/var/lib/kubelet/pods/b2e3cd89-1359-4a18-ade6-05cba0c68e70/volumes" Sep 29 19:00:09 crc kubenswrapper[4792]: I0929 19:00:09.196179 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-grgxc" Sep 29 19:00:09 crc kubenswrapper[4792]: I0929 19:00:09.236149 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-grgxc"] Sep 29 19:00:09 crc kubenswrapper[4792]: I0929 19:00:09.339201 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-grgxc" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerName="registry-server" containerID="cri-o://8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085" gracePeriod=2 Sep 29 19:00:09 crc kubenswrapper[4792]: I0929 19:00:09.355178 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4pj4c"] Sep 29 19:00:09 crc kubenswrapper[4792]: I0929 19:00:09.481392 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.245604 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-grgxc" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.345597 4792 generic.go:334] "Generic (PLEG): container finished" podID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerID="8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085" exitCode=0 Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.345877 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grgxc" event={"ID":"51c4e50f-73c5-485d-a704-b0454f9cfde5","Type":"ContainerDied","Data":"8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085"} Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.346001 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grgxc" event={"ID":"51c4e50f-73c5-485d-a704-b0454f9cfde5","Type":"ContainerDied","Data":"5a942834ccbc865ddd4d0876c48783a5459ba21b2c7056a68dd231c0cf62c6ca"} Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.346097 4792 scope.go:117] "RemoveContainer" containerID="8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.346293 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grgxc" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.361721 4792 scope.go:117] "RemoveContainer" containerID="d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.375870 4792 scope.go:117] "RemoveContainer" containerID="1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.382629 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjgzc\" (UniqueName: \"kubernetes.io/projected/51c4e50f-73c5-485d-a704-b0454f9cfde5-kube-api-access-qjgzc\") pod \"51c4e50f-73c5-485d-a704-b0454f9cfde5\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.382683 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-utilities\") pod \"51c4e50f-73c5-485d-a704-b0454f9cfde5\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.382709 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-catalog-content\") pod \"51c4e50f-73c5-485d-a704-b0454f9cfde5\" (UID: \"51c4e50f-73c5-485d-a704-b0454f9cfde5\") " Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.383386 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-utilities" (OuterVolumeSpecName: "utilities") pod "51c4e50f-73c5-485d-a704-b0454f9cfde5" (UID: "51c4e50f-73c5-485d-a704-b0454f9cfde5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.388610 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51c4e50f-73c5-485d-a704-b0454f9cfde5-kube-api-access-qjgzc" (OuterVolumeSpecName: "kube-api-access-qjgzc") pod "51c4e50f-73c5-485d-a704-b0454f9cfde5" (UID: "51c4e50f-73c5-485d-a704-b0454f9cfde5"). InnerVolumeSpecName "kube-api-access-qjgzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.397058 4792 scope.go:117] "RemoveContainer" containerID="8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085" Sep 29 19:00:10 crc kubenswrapper[4792]: E0929 19:00:10.397733 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085\": container with ID starting with 8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085 not found: ID does not exist" containerID="8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.397854 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085"} err="failed to get container status \"8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085\": rpc error: code = NotFound desc = could not find container \"8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085\": container with ID starting with 8e1f5a50f6e641b40ac198f581c022665d8ce57d3af67e90064fe226847af085 not found: ID does not exist" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.397954 4792 scope.go:117] "RemoveContainer" containerID="d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639" Sep 29 19:00:10 crc kubenswrapper[4792]: E0929 19:00:10.398228 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639\": container with ID starting with d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639 not found: ID does not exist" containerID="d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.398312 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639"} err="failed to get container status \"d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639\": rpc error: code = NotFound desc = could not find container \"d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639\": container with ID starting with d9a09dfcad17708a28569b778dee045de64c9006cf82f99b0820471799201639 not found: ID does not exist" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.398390 4792 scope.go:117] "RemoveContainer" containerID="1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482" Sep 29 19:00:10 crc kubenswrapper[4792]: E0929 19:00:10.398716 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482\": container with ID starting with 1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482 not found: ID does not 
exist" containerID="1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.398757 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482"} err="failed to get container status \"1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482\": rpc error: code = NotFound desc = could not find container \"1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482\": container with ID starting with 1321241ef1cca6d5e01a10a972d27b19126ea080bf886b84df214af966d29482 not found: ID does not exist" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.434247 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "51c4e50f-73c5-485d-a704-b0454f9cfde5" (UID: "51c4e50f-73c5-485d-a704-b0454f9cfde5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.484388 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjgzc\" (UniqueName: \"kubernetes.io/projected/51c4e50f-73c5-485d-a704-b0454f9cfde5-kube-api-access-qjgzc\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.484660 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.484729 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51c4e50f-73c5-485d-a704-b0454f9cfde5-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.675577 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-grgxc"] Sep 29 19:00:10 crc kubenswrapper[4792]: I0929 19:00:10.679468 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-grgxc"] Sep 29 19:00:11 crc kubenswrapper[4792]: I0929 19:00:11.022374 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" path="/var/lib/kubelet/pods/51c4e50f-73c5-485d-a704-b0454f9cfde5/volumes" Sep 29 19:00:11 crc kubenswrapper[4792]: I0929 19:00:11.959607 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:00:11 crc kubenswrapper[4792]: I0929 19:00:11.959910 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:00:11 crc kubenswrapper[4792]: I0929 19:00:11.959949 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:00:11 crc kubenswrapper[4792]: I0929 19:00:11.960423 4792 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 19:00:11 crc kubenswrapper[4792]: I0929 19:00:11.960465 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65" gracePeriod=600
Sep 29 19:00:12 crc kubenswrapper[4792]: I0929 19:00:12.357961 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65" exitCode=0
Sep 29 19:00:12 crc kubenswrapper[4792]: I0929 19:00:12.358042 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65"}
Sep 29 19:00:12 crc kubenswrapper[4792]: I0929 19:00:12.358607 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"7d8c0b7ec3035efa9edf9d9b2ba12dabade2b1415013394067aca0438b434980"}
Sep 29 19:00:34 crc kubenswrapper[4792]: I0929 19:00:34.386376 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" podUID="56d0b0e8-6440-4f28-9d05-ad7be713a117" containerName="oauth-openshift" containerID="cri-o://c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f" gracePeriod=15
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.298988 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.346144 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-79d78bfd77-4csmb"]
Sep 29 19:00:35 crc kubenswrapper[4792]: E0929 19:00:35.346569 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerName="registry-server"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.346673 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerName="registry-server"
Sep 29 19:00:35 crc kubenswrapper[4792]: E0929 19:00:35.346765 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerName="registry-server"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.346874 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerName="registry-server"
Sep 29 19:00:35 crc kubenswrapper[4792]: E0929 19:00:35.346961 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerName="extract-content"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.347033 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerName="extract-content"
Sep 29 19:00:35 crc kubenswrapper[4792]: E0929 19:00:35.347089 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerName="extract-utilities"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.347161 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerName="extract-utilities"
Sep 29 19:00:35 crc kubenswrapper[4792]: E0929 19:00:35.347233 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56d0b0e8-6440-4f28-9d05-ad7be713a117" containerName="oauth-openshift"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.347308 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="56d0b0e8-6440-4f28-9d05-ad7be713a117" containerName="oauth-openshift"
Sep 29 19:00:35 crc kubenswrapper[4792]: E0929 19:00:35.347364 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerName="extract-utilities"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.347434 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerName="extract-utilities"
Sep 29 19:00:35 crc kubenswrapper[4792]: E0929 19:00:35.347572 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerName="extract-content"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.347656 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerName="extract-content"
Sep 29 19:00:35 crc kubenswrapper[4792]: E0929 19:00:35.347733 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a627c392-8375-4d2b-be05-3b7b6b697664" containerName="collect-profiles"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.347809 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a627c392-8375-4d2b-be05-3b7b6b697664" containerName="collect-profiles"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.348092 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2e3cd89-1359-4a18-ade6-05cba0c68e70" containerName="registry-server"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.348173 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="56d0b0e8-6440-4f28-9d05-ad7be713a117" containerName="oauth-openshift"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.348247 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="51c4e50f-73c5-485d-a704-b0454f9cfde5" containerName="registry-server"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.348329 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a627c392-8375-4d2b-be05-3b7b6b697664" containerName="collect-profiles"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.348867 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.377367 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-79d78bfd77-4csmb"]
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.404827 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-dir\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.404902 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-idp-0-file-data\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.404923 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-error\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.404934 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.404983 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-provider-selection\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405036 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-service-ca\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405056 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-cliconfig\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405076 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-serving-cert\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405134 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-login\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405151 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-policies\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405168 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-ocp-branding-template\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405206 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mttjt\" (UniqueName: \"kubernetes.io/projected/56d0b0e8-6440-4f28-9d05-ad7be713a117-kube-api-access-mttjt\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405238 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-trusted-ca-bundle\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405306 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-session\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405347 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-router-certs\") pod \"56d0b0e8-6440-4f28-9d05-ad7be713a117\" (UID: \"56d0b0e8-6440-4f28-9d05-ad7be713a117\") "
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405485 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-router-certs\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405531 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405556 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-template-login\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405605 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405634 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-session\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405693 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405718 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-service-ca\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405769 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405799 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-template-error\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405867 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405887 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-audit-policies\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405911 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d471648d-735a-41d9-aad1-151a706f8fb1-audit-dir\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405949 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vznk\" (UniqueName: \"kubernetes.io/projected/d471648d-735a-41d9-aad1-151a706f8fb1-kube-api-access-8vznk\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.405968 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.406042 4792 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-dir\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.409644 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.410343 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.410705 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.410823 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.411115 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56d0b0e8-6440-4f28-9d05-ad7be713a117-kube-api-access-mttjt" (OuterVolumeSpecName: "kube-api-access-mttjt") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "kube-api-access-mttjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.411610 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.412907 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.413130 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.414670 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.414693 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.415209 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.422062 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.423162 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "56d0b0e8-6440-4f28-9d05-ad7be713a117" (UID: "56d0b0e8-6440-4f28-9d05-ad7be713a117"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.495164 4792 generic.go:334] "Generic (PLEG): container finished" podID="56d0b0e8-6440-4f28-9d05-ad7be713a117" containerID="c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f" exitCode=0
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.495225 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" event={"ID":"56d0b0e8-6440-4f28-9d05-ad7be713a117","Type":"ContainerDied","Data":"c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f"}
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.495263 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c" event={"ID":"56d0b0e8-6440-4f28-9d05-ad7be713a117","Type":"ContainerDied","Data":"91928c38b1f99db8d01fc1f53f96029af51d61838019c92ae900dfd857d069ad"}
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.495290 4792 scope.go:117] "RemoveContainer" containerID="c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.495440 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-4pj4c"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507445 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507503 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-audit-policies\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507531 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d471648d-735a-41d9-aad1-151a706f8fb1-audit-dir\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507550 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vznk\" (UniqueName: \"kubernetes.io/projected/d471648d-735a-41d9-aad1-151a706f8fb1-kube-api-access-8vznk\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507611 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507631 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-router-certs\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507651 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507695 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-template-login\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507719 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507757 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-session\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507782 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507801 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-service-ca\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507834 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507885 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-template-error\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507947 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507961 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507975 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.507988 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.508002 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.508039 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.508052 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.508064 4792 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-audit-policies\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.508076 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.508109 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mttjt\" (UniqueName: \"kubernetes.io/projected/56d0b0e8-6440-4f28-9d05-ad7be713a117-kube-api-access-mttjt\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.508123 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.508137 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.508149 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56d0b0e8-6440-4f28-9d05-ad7be713a117-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.508921 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-service-ca\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.510366 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.510474 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d471648d-735a-41d9-aad1-151a706f8fb1-audit-dir\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.510553 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-audit-policies\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.510692 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-template-error\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.511342 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.511381 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.512797 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.514277 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-session\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.514710 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-router-certs\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.515877 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.521113 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.525428 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d471648d-735a-41d9-aad1-151a706f8fb1-v4-0-config-user-template-login\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.527152 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vznk\" (UniqueName: \"kubernetes.io/projected/d471648d-735a-41d9-aad1-151a706f8fb1-kube-api-access-8vznk\") pod \"oauth-openshift-79d78bfd77-4csmb\" (UID: \"d471648d-735a-41d9-aad1-151a706f8fb1\") " pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.566751 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4pj4c"]
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.568441 4792 scope.go:117] "RemoveContainer" containerID="c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f"
Sep 29 19:00:35 crc kubenswrapper[4792]: E0929 19:00:35.569127 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f\": container with ID starting with c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f not found: ID does not exist" containerID="c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.569163 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f"} err="failed to get container status \"c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f\": rpc error: code = NotFound desc = could not find container \"c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f\": container with ID starting with c1ce633d2a2c5a4cb32631b665a6a6c0afb59e75fbeb0b551836d22a9e28724f not found: ID does not exist"
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.570202 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4pj4c"]
Sep 29 19:00:35 crc kubenswrapper[4792]: I0929 19:00:35.672452 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:36 crc kubenswrapper[4792]: I0929 19:00:36.059576 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-79d78bfd77-4csmb"]
Sep 29 19:00:36 crc kubenswrapper[4792]: I0929 19:00:36.502046 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb" event={"ID":"d471648d-735a-41d9-aad1-151a706f8fb1","Type":"ContainerStarted","Data":"b3baa69dd5bd04875f2aa6add4089ce6dca4e4aaf6da91f17cfe2f4a928d1b68"}
Sep 29 19:00:36 crc kubenswrapper[4792]: I0929 19:00:36.503027 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb" event={"ID":"d471648d-735a-41d9-aad1-151a706f8fb1","Type":"ContainerStarted","Data":"64bd32500d2bc29e3f65eedd3e5b35974b6b6080c05058171abf2b21fc83400a"}
Sep 29 19:00:36 crc kubenswrapper[4792]: I0929 19:00:36.503063 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:36 crc kubenswrapper[4792]: I0929 19:00:36.540018 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb" podStartSLOduration=27.539996702 podStartE2EDuration="27.539996702s" podCreationTimestamp="2025-09-29 19:00:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:00:36.533684517 +0000 UTC m=+248.526991953" watchObservedRunningTime="2025-09-29 19:00:36.539996702 +0000 UTC m=+248.533304118"
Sep 29 19:00:36 crc kubenswrapper[4792]: I0929 19:00:36.894824 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-79d78bfd77-4csmb"
Sep 29 19:00:37 crc kubenswrapper[4792]: I0929 19:00:37.022134 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56d0b0e8-6440-4f28-9d05-ad7be713a117" path="/var/lib/kubelet/pods/56d0b0e8-6440-4f28-9d05-ad7be713a117/volumes"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.479647 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wmjgp"]
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.482096 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wmjgp" podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" containerName="registry-server" containerID="cri-o://5a961a166e2e652bd3f74ea6ad271d1dbff21f3a6f515108e484304ade0d2048" gracePeriod=30
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.486616 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-llhzn"]
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.487266 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-llhzn" podUID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerName="registry-server" containerID="cri-o://0a2bf2229314f16f943c0314ec0dbfbe7261126e2040610b1c8ef94769f07a47" gracePeriod=30
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.497668 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6r69d"]
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.497889 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" podUID="502b8b77-8c80-4cc0-8590-6fb9ce342289" containerName="marketplace-operator" containerID="cri-o://d4df5141117ee82288cfaca12256a8e952d2fedf4239c0ae5eaf254327076ae4" gracePeriod=30
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.506326 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rwhnq"]
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.506605 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rwhnq" podUID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerName="registry-server" containerID="cri-o://91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3" gracePeriod=30
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.517247 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zr4zx"]
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.517547 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zr4zx" podUID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerName="registry-server" containerID="cri-o://0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab" gracePeriod=30
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.531384 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mjdnk"]
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.532133 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.547697 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7ae10600-0f4b-4b98-b304-a13cb5283d63-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mjdnk\" (UID: \"7ae10600-0f4b-4b98-b304-a13cb5283d63\") " pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.547761 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7ae10600-0f4b-4b98-b304-a13cb5283d63-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mjdnk\" (UID: \"7ae10600-0f4b-4b98-b304-a13cb5283d63\") " pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.547781 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcf4p\" (UniqueName: \"kubernetes.io/projected/7ae10600-0f4b-4b98-b304-a13cb5283d63-kube-api-access-kcf4p\") pod \"marketplace-operator-79b997595-mjdnk\" (UID: \"7ae10600-0f4b-4b98-b304-a13cb5283d63\") " pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.556120 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mjdnk"]
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.615406 4792 generic.go:334] "Generic (PLEG): container finished" podID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" containerID="5a961a166e2e652bd3f74ea6ad271d1dbff21f3a6f515108e484304ade0d2048" exitCode=0
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.615479 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmjgp" event={"ID":"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e","Type":"ContainerDied","Data":"5a961a166e2e652bd3f74ea6ad271d1dbff21f3a6f515108e484304ade0d2048"}
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.617050 4792 generic.go:334] "Generic (PLEG): container finished" podID="502b8b77-8c80-4cc0-8590-6fb9ce342289" containerID="d4df5141117ee82288cfaca12256a8e952d2fedf4239c0ae5eaf254327076ae4" exitCode=0
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.617121 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" event={"ID":"502b8b77-8c80-4cc0-8590-6fb9ce342289","Type":"ContainerDied","Data":"d4df5141117ee82288cfaca12256a8e952d2fedf4239c0ae5eaf254327076ae4"}
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.619796 4792 generic.go:334] "Generic (PLEG): container finished" podID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerID="0a2bf2229314f16f943c0314ec0dbfbe7261126e2040610b1c8ef94769f07a47" exitCode=0
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.619818 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llhzn" event={"ID":"de0a9077-e8b8-4b2c-bfdf-4e965627f520","Type":"ContainerDied","Data":"0a2bf2229314f16f943c0314ec0dbfbe7261126e2040610b1c8ef94769f07a47"}
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.651892 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7ae10600-0f4b-4b98-b304-a13cb5283d63-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mjdnk\" (UID: \"7ae10600-0f4b-4b98-b304-a13cb5283d63\") " pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.651929 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcf4p\" (UniqueName: \"kubernetes.io/projected/7ae10600-0f4b-4b98-b304-a13cb5283d63-kube-api-access-kcf4p\") pod \"marketplace-operator-79b997595-mjdnk\" (UID: \"7ae10600-0f4b-4b98-b304-a13cb5283d63\") " pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.651998 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7ae10600-0f4b-4b98-b304-a13cb5283d63-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mjdnk\" (UID: \"7ae10600-0f4b-4b98-b304-a13cb5283d63\") " pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.656353 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7ae10600-0f4b-4b98-b304-a13cb5283d63-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mjdnk\" (UID: \"7ae10600-0f4b-4b98-b304-a13cb5283d63\") " pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.660509 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7ae10600-0f4b-4b98-b304-a13cb5283d63-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mjdnk\" (UID: \"7ae10600-0f4b-4b98-b304-a13cb5283d63\") " pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.672696 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcf4p\" (UniqueName: \"kubernetes.io/projected/7ae10600-0f4b-4b98-b304-a13cb5283d63-kube-api-access-kcf4p\") pod \"marketplace-operator-79b997595-mjdnk\" (UID: \"7ae10600-0f4b-4b98-b304-a13cb5283d63\") " pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.854145 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.866388 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-llhzn"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.948591 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d"
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.954914 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-utilities\") pod \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") "
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.957442 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-utilities" (OuterVolumeSpecName: "utilities") pod "de0a9077-e8b8-4b2c-bfdf-4e965627f520" (UID: "de0a9077-e8b8-4b2c-bfdf-4e965627f520"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.957845 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-catalog-content\") pod \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") "
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.963241 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cr2rq\" (UniqueName: \"kubernetes.io/projected/de0a9077-e8b8-4b2c-bfdf-4e965627f520-kube-api-access-cr2rq\") pod \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\" (UID: \"de0a9077-e8b8-4b2c-bfdf-4e965627f520\") "
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.963462 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.978688 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de0a9077-e8b8-4b2c-bfdf-4e965627f520-kube-api-access-cr2rq" (OuterVolumeSpecName: "kube-api-access-cr2rq") pod "de0a9077-e8b8-4b2c-bfdf-4e965627f520" (UID: "de0a9077-e8b8-4b2c-bfdf-4e965627f520"). InnerVolumeSpecName "kube-api-access-cr2rq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:00:54 crc kubenswrapper[4792]: I0929 19:00:54.989306 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wmjgp"
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.027111 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr4zx"
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.054180 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rwhnq"
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.064526 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l88f5\" (UniqueName: \"kubernetes.io/projected/71994559-92a6-4331-bbbb-04e9d2498c9b-kube-api-access-l88f5\") pod \"71994559-92a6-4331-bbbb-04e9d2498c9b\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.064598 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-trusted-ca\") pod \"502b8b77-8c80-4cc0-8590-6fb9ce342289\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.064639 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-utilities\") pod \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.064682 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-catalog-content\") pod \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.064715 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gmfg\" (UniqueName: \"kubernetes.io/projected/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-kube-api-access-2gmfg\") pod \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\" (UID: \"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.064753 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-operator-metrics\") pod \"502b8b77-8c80-4cc0-8590-6fb9ce342289\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.064792 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hj5jh\" (UniqueName: \"kubernetes.io/projected/502b8b77-8c80-4cc0-8590-6fb9ce342289-kube-api-access-hj5jh\") pod \"502b8b77-8c80-4cc0-8590-6fb9ce342289\" (UID: \"502b8b77-8c80-4cc0-8590-6fb9ce342289\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.064808 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-utilities\") pod \"71994559-92a6-4331-bbbb-04e9d2498c9b\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.064832 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-catalog-content\") pod \"71994559-92a6-4331-bbbb-04e9d2498c9b\" (UID: \"71994559-92a6-4331-bbbb-04e9d2498c9b\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.065048 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cr2rq\" (UniqueName: \"kubernetes.io/projected/de0a9077-e8b8-4b2c-bfdf-4e965627f520-kube-api-access-cr2rq\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.065819 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "502b8b77-8c80-4cc0-8590-6fb9ce342289" (UID: "502b8b77-8c80-4cc0-8590-6fb9ce342289"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.066197 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-utilities" (OuterVolumeSpecName: "utilities") pod "c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" (UID: "c4e9ba50-617f-4f99-9430-ee6fb3d21b8e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.066699 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-utilities" (OuterVolumeSpecName: "utilities") pod "71994559-92a6-4331-bbbb-04e9d2498c9b" (UID: "71994559-92a6-4331-bbbb-04e9d2498c9b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.069953 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "502b8b77-8c80-4cc0-8590-6fb9ce342289" (UID: "502b8b77-8c80-4cc0-8590-6fb9ce342289"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.075480 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/502b8b77-8c80-4cc0-8590-6fb9ce342289-kube-api-access-hj5jh" (OuterVolumeSpecName: "kube-api-access-hj5jh") pod "502b8b77-8c80-4cc0-8590-6fb9ce342289" (UID: "502b8b77-8c80-4cc0-8590-6fb9ce342289"). InnerVolumeSpecName "kube-api-access-hj5jh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.075582 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-kube-api-access-2gmfg" (OuterVolumeSpecName: "kube-api-access-2gmfg") pod "c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" (UID: "c4e9ba50-617f-4f99-9430-ee6fb3d21b8e"). InnerVolumeSpecName "kube-api-access-2gmfg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.079575 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71994559-92a6-4331-bbbb-04e9d2498c9b-kube-api-access-l88f5" (OuterVolumeSpecName: "kube-api-access-l88f5") pod "71994559-92a6-4331-bbbb-04e9d2498c9b" (UID: "71994559-92a6-4331-bbbb-04e9d2498c9b"). InnerVolumeSpecName "kube-api-access-l88f5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.103594 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de0a9077-e8b8-4b2c-bfdf-4e965627f520" (UID: "de0a9077-e8b8-4b2c-bfdf-4e965627f520"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.166326 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-catalog-content\") pod \"f9df84a0-6eb0-415a-ae29-c93ba496f855\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.166389 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2z2m\" (UniqueName: \"kubernetes.io/projected/f9df84a0-6eb0-415a-ae29-c93ba496f855-kube-api-access-c2z2m\") pod \"f9df84a0-6eb0-415a-ae29-c93ba496f855\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.167070 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-utilities\") pod \"f9df84a0-6eb0-415a-ae29-c93ba496f855\" (UID: \"f9df84a0-6eb0-415a-ae29-c93ba496f855\") "
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.167394 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l88f5\" (UniqueName: \"kubernetes.io/projected/71994559-92a6-4331-bbbb-04e9d2498c9b-kube-api-access-l88f5\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.167408 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de0a9077-e8b8-4b2c-bfdf-4e965627f520-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.167418 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.167426 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.167435 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gmfg\" (UniqueName: \"kubernetes.io/projected/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-kube-api-access-2gmfg\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.167444 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/502b8b77-8c80-4cc0-8590-6fb9ce342289-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.167455 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hj5jh\" (UniqueName: \"kubernetes.io/projected/502b8b77-8c80-4cc0-8590-6fb9ce342289-kube-api-access-hj5jh\") on node \"crc\" DevicePath \"\""
Sep 29 19:00:55 crc
kubenswrapper[4792]: I0929 19:00:55.167464 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.168089 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-utilities" (OuterVolumeSpecName: "utilities") pod "f9df84a0-6eb0-415a-ae29-c93ba496f855" (UID: "f9df84a0-6eb0-415a-ae29-c93ba496f855"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.169118 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9df84a0-6eb0-415a-ae29-c93ba496f855-kube-api-access-c2z2m" (OuterVolumeSpecName: "kube-api-access-c2z2m") pod "f9df84a0-6eb0-415a-ae29-c93ba496f855" (UID: "f9df84a0-6eb0-415a-ae29-c93ba496f855"). InnerVolumeSpecName "kube-api-access-c2z2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.180360 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" (UID: "c4e9ba50-617f-4f99-9430-ee6fb3d21b8e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.190101 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9df84a0-6eb0-415a-ae29-c93ba496f855" (UID: "f9df84a0-6eb0-415a-ae29-c93ba496f855"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.200273 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "71994559-92a6-4331-bbbb-04e9d2498c9b" (UID: "71994559-92a6-4331-bbbb-04e9d2498c9b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.268810 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2z2m\" (UniqueName: \"kubernetes.io/projected/f9df84a0-6eb0-415a-ae29-c93ba496f855-kube-api-access-c2z2m\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.268891 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71994559-92a6-4331-bbbb-04e9d2498c9b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.268904 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.268913 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.268923 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9df84a0-6eb0-415a-ae29-c93ba496f855-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.372821 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mjdnk"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.627364 4792 generic.go:334] "Generic (PLEG): container finished" podID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerID="91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3" exitCode=0 Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.627614 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rwhnq" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.627506 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rwhnq" event={"ID":"f9df84a0-6eb0-415a-ae29-c93ba496f855","Type":"ContainerDied","Data":"91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3"} Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.627669 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rwhnq" event={"ID":"f9df84a0-6eb0-415a-ae29-c93ba496f855","Type":"ContainerDied","Data":"8b9aa392a9aa5a8f7c3733593131b28f53d6ad590a2a1e16bc8bbfff0b128372"} Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.627692 4792 scope.go:117] "RemoveContainer" containerID="91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.638175 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.638173 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6r69d" event={"ID":"502b8b77-8c80-4cc0-8590-6fb9ce342289","Type":"ContainerDied","Data":"fa63383ff0e09162e1730c626421fe38322953b2c3783709d59020c3446e769c"} Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.643684 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-llhzn" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.643690 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llhzn" event={"ID":"de0a9077-e8b8-4b2c-bfdf-4e965627f520","Type":"ContainerDied","Data":"77b201a3a10d15a7d22a23e810153a5520eacd854960f6019cee8a231070e6fd"} Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.650425 4792 generic.go:334] "Generic (PLEG): container finished" podID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerID="0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab" exitCode=0 Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.650496 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr4zx" event={"ID":"71994559-92a6-4331-bbbb-04e9d2498c9b","Type":"ContainerDied","Data":"0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab"} Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.650524 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr4zx" event={"ID":"71994559-92a6-4331-bbbb-04e9d2498c9b","Type":"ContainerDied","Data":"47b480c16e708df0b6b4bef02dca9298fa0d2922c7632ca9d195491d7f6828cc"} Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.650498 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr4zx" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.659652 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk" event={"ID":"7ae10600-0f4b-4b98-b304-a13cb5283d63","Type":"ContainerStarted","Data":"5038c154008a146b1d41bb04a1ca49835d4b5c9e715210ee5e0bc1a776412c2b"} Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.659687 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk" event={"ID":"7ae10600-0f4b-4b98-b304-a13cb5283d63","Type":"ContainerStarted","Data":"71235ee186210ea40596f84cbb8904b95176e444ee55772ee07ecee5d2d093b8"} Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.659703 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.661479 4792 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mjdnk container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.56:8080/healthz\": dial tcp 10.217.0.56:8080: connect: connection refused" start-of-body= Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.661603 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk" podUID="7ae10600-0f4b-4b98-b304-a13cb5283d63" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.56:8080/healthz\": dial tcp 10.217.0.56:8080: connect: connection refused" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.664465 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmjgp" event={"ID":"c4e9ba50-617f-4f99-9430-ee6fb3d21b8e","Type":"ContainerDied","Data":"8c6c1d5a7b4302c79d9b9dce64f1b945c232546cedc82a9510c0cd312593d0b8"} Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.664535 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wmjgp" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.671009 4792 scope.go:117] "RemoveContainer" containerID="a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.686797 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk" podStartSLOduration=1.6867778009999999 podStartE2EDuration="1.686777801s" podCreationTimestamp="2025-09-29 19:00:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:00:55.68364233 +0000 UTC m=+267.676949746" watchObservedRunningTime="2025-09-29 19:00:55.686777801 +0000 UTC m=+267.680085207" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.699175 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zr4zx"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.699699 4792 scope.go:117] "RemoveContainer" containerID="a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.705764 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zr4zx"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.713831 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-llhzn"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.718908 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-llhzn"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.719756 4792 scope.go:117] "RemoveContainer" containerID="91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3" Sep 29 19:00:55 crc kubenswrapper[4792]: E0929 19:00:55.720590 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3\": container with ID starting with 91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3 not found: ID does not exist" containerID="91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.720874 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3"} err="failed to get container status \"91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3\": rpc error: code = NotFound desc = could not find container \"91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3\": container with ID starting with 91a7ed73d691f49f1f4f3cacf336bc000ac0ea984bb4b4662ab4c51557c092c3 not found: ID does not exist" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.720988 4792 scope.go:117] "RemoveContainer" containerID="a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818" Sep 29 19:00:55 crc kubenswrapper[4792]: E0929 19:00:55.721700 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818\": container with ID starting with a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818 not found: ID does not exist" 
containerID="a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.721746 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818"} err="failed to get container status \"a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818\": rpc error: code = NotFound desc = could not find container \"a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818\": container with ID starting with a7efc8f1ef22d26cfefe3c43133c19058f502c01dbe9619e9cf5a3e4a976e818 not found: ID does not exist" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.721773 4792 scope.go:117] "RemoveContainer" containerID="a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f" Sep 29 19:00:55 crc kubenswrapper[4792]: E0929 19:00:55.722250 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f\": container with ID starting with a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f not found: ID does not exist" containerID="a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.722312 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f"} err="failed to get container status \"a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f\": rpc error: code = NotFound desc = could not find container \"a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f\": container with ID starting with a6c83bf8aa059b878605c7727573a1b67348f900120e0f2162eedb7ffb45af5f not found: ID does not exist" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.722344 4792 scope.go:117] "RemoveContainer" containerID="d4df5141117ee82288cfaca12256a8e952d2fedf4239c0ae5eaf254327076ae4" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.741098 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rwhnq"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.741488 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rwhnq"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.742293 4792 scope.go:117] "RemoveContainer" containerID="0a2bf2229314f16f943c0314ec0dbfbe7261126e2040610b1c8ef94769f07a47" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.768146 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6r69d"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.770574 4792 scope.go:117] "RemoveContainer" containerID="e6abf063928795bd8228eadfdeef10b45d8b13c61bd2dec5b8495a4c0f2d087b" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.771879 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6r69d"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.775249 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wmjgp"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.778398 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wmjgp"] Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 
19:00:55.785276 4792 scope.go:117] "RemoveContainer" containerID="fc358f8ca2540c2cbe0752bb7b0b8288c8770fbd804f9eef43fea6ea2d103a6f" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.805128 4792 scope.go:117] "RemoveContainer" containerID="0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.821652 4792 scope.go:117] "RemoveContainer" containerID="227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.836805 4792 scope.go:117] "RemoveContainer" containerID="12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.852310 4792 scope.go:117] "RemoveContainer" containerID="0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab" Sep 29 19:00:55 crc kubenswrapper[4792]: E0929 19:00:55.853213 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab\": container with ID starting with 0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab not found: ID does not exist" containerID="0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.853246 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab"} err="failed to get container status \"0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab\": rpc error: code = NotFound desc = could not find container \"0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab\": container with ID starting with 0c0d65218eefd43259d3addcf0d6fd34bd71915cdf7387e114268297dc9b58ab not found: ID does not exist" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.853271 4792 scope.go:117] "RemoveContainer" containerID="227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e" Sep 29 19:00:55 crc kubenswrapper[4792]: E0929 19:00:55.854326 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e\": container with ID starting with 227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e not found: ID does not exist" containerID="227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.854384 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e"} err="failed to get container status \"227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e\": rpc error: code = NotFound desc = could not find container \"227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e\": container with ID starting with 227e2ab5fa1528a0678e41a5101423a9d3c21988c24453cb3f887a2e2832933e not found: ID does not exist" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.854423 4792 scope.go:117] "RemoveContainer" containerID="12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0" Sep 29 19:00:55 crc kubenswrapper[4792]: E0929 19:00:55.854687 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0\": container with ID starting with 12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0 not found: ID does not exist" containerID="12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.854714 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0"} err="failed to get container status \"12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0\": rpc error: code = NotFound desc = could not find container \"12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0\": container with ID starting with 12d411e78ee90c29e49da9680b84741acb133f64785b01d2c758c5ec329a5fd0 not found: ID does not exist" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.854729 4792 scope.go:117] "RemoveContainer" containerID="5a961a166e2e652bd3f74ea6ad271d1dbff21f3a6f515108e484304ade0d2048" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.870836 4792 scope.go:117] "RemoveContainer" containerID="6f84229f16b6c209e965485ad6e16aff40cca6f6b74e880250e15755dc87b664" Sep 29 19:00:55 crc kubenswrapper[4792]: I0929 19:00:55.897335 4792 scope.go:117] "RemoveContainer" containerID="89d1da23908d418782c64657fd64290c6db59f42bfba17548aed22f312f7f089" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.676593 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mjdnk" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.702359 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r5rqb"] Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.702811 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerName="extract-content" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.702919 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerName="extract-content" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.703012 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="502b8b77-8c80-4cc0-8590-6fb9ce342289" containerName="marketplace-operator" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.703081 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="502b8b77-8c80-4cc0-8590-6fb9ce342289" containerName="marketplace-operator" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.703158 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerName="extract-utilities" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.703325 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerName="extract-utilities" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.703398 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.703499 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.703574 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" 
containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.703651 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.703895 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerName="extract-content" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.703967 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerName="extract-content" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.704027 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" containerName="extract-utilities" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.704390 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" containerName="extract-utilities" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.704533 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerName="extract-utilities" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.704589 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerName="extract-utilities" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.704648 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerName="extract-content" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.704710 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerName="extract-content" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.704764 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.704815 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.704902 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" containerName="extract-content" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.704960 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" containerName="extract-content" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.705025 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerName="extract-utilities" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.705079 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerName="extract-utilities" Sep 29 19:00:56 crc kubenswrapper[4792]: E0929 19:00:56.705144 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.705202 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.705340 4792 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.705407 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="502b8b77-8c80-4cc0-8590-6fb9ce342289" containerName="marketplace-operator" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.705469 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.705531 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9df84a0-6eb0-415a-ae29-c93ba496f855" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.705588 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="71994559-92a6-4331-bbbb-04e9d2498c9b" containerName="registry-server" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.706319 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.709474 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.718458 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r5rqb"] Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.788525 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e799b8b0-ae24-4880-ad4c-11dc094789f4-catalog-content\") pod \"redhat-marketplace-r5rqb\" (UID: \"e799b8b0-ae24-4880-ad4c-11dc094789f4\") " pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.788587 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx27p\" (UniqueName: \"kubernetes.io/projected/e799b8b0-ae24-4880-ad4c-11dc094789f4-kube-api-access-hx27p\") pod \"redhat-marketplace-r5rqb\" (UID: \"e799b8b0-ae24-4880-ad4c-11dc094789f4\") " pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.789163 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e799b8b0-ae24-4880-ad4c-11dc094789f4-utilities\") pod \"redhat-marketplace-r5rqb\" (UID: \"e799b8b0-ae24-4880-ad4c-11dc094789f4\") " pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.890268 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e799b8b0-ae24-4880-ad4c-11dc094789f4-utilities\") pod \"redhat-marketplace-r5rqb\" (UID: \"e799b8b0-ae24-4880-ad4c-11dc094789f4\") " pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.890724 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e799b8b0-ae24-4880-ad4c-11dc094789f4-utilities\") pod \"redhat-marketplace-r5rqb\" (UID: \"e799b8b0-ae24-4880-ad4c-11dc094789f4\") " pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.890911 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e799b8b0-ae24-4880-ad4c-11dc094789f4-catalog-content\") pod \"redhat-marketplace-r5rqb\" (UID: \"e799b8b0-ae24-4880-ad4c-11dc094789f4\") " pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.891176 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx27p\" (UniqueName: \"kubernetes.io/projected/e799b8b0-ae24-4880-ad4c-11dc094789f4-kube-api-access-hx27p\") pod \"redhat-marketplace-r5rqb\" (UID: \"e799b8b0-ae24-4880-ad4c-11dc094789f4\") " pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.891137 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e799b8b0-ae24-4880-ad4c-11dc094789f4-catalog-content\") pod \"redhat-marketplace-r5rqb\" (UID: \"e799b8b0-ae24-4880-ad4c-11dc094789f4\") " pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.902920 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dh2pn"] Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.903817 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.906936 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.915598 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dh2pn"] Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.916071 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx27p\" (UniqueName: \"kubernetes.io/projected/e799b8b0-ae24-4880-ad4c-11dc094789f4-kube-api-access-hx27p\") pod \"redhat-marketplace-r5rqb\" (UID: \"e799b8b0-ae24-4880-ad4c-11dc094789f4\") " pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.992448 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74049985-d99b-416d-80a0-2f73a2253f79-catalog-content\") pod \"redhat-operators-dh2pn\" (UID: \"74049985-d99b-416d-80a0-2f73a2253f79\") " pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.992505 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74049985-d99b-416d-80a0-2f73a2253f79-utilities\") pod \"redhat-operators-dh2pn\" (UID: \"74049985-d99b-416d-80a0-2f73a2253f79\") " pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:56 crc kubenswrapper[4792]: I0929 19:00:56.992536 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pxjs\" (UniqueName: \"kubernetes.io/projected/74049985-d99b-416d-80a0-2f73a2253f79-kube-api-access-5pxjs\") pod \"redhat-operators-dh2pn\" (UID: \"74049985-d99b-416d-80a0-2f73a2253f79\") " pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.020956 4792 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="502b8b77-8c80-4cc0-8590-6fb9ce342289" path="/var/lib/kubelet/pods/502b8b77-8c80-4cc0-8590-6fb9ce342289/volumes" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.021451 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71994559-92a6-4331-bbbb-04e9d2498c9b" path="/var/lib/kubelet/pods/71994559-92a6-4331-bbbb-04e9d2498c9b/volumes" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.022011 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4e9ba50-617f-4f99-9430-ee6fb3d21b8e" path="/var/lib/kubelet/pods/c4e9ba50-617f-4f99-9430-ee6fb3d21b8e/volumes" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.023124 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de0a9077-e8b8-4b2c-bfdf-4e965627f520" path="/var/lib/kubelet/pods/de0a9077-e8b8-4b2c-bfdf-4e965627f520/volumes" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.023873 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9df84a0-6eb0-415a-ae29-c93ba496f855" path="/var/lib/kubelet/pods/f9df84a0-6eb0-415a-ae29-c93ba496f855/volumes" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.024591 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.093516 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pxjs\" (UniqueName: \"kubernetes.io/projected/74049985-d99b-416d-80a0-2f73a2253f79-kube-api-access-5pxjs\") pod \"redhat-operators-dh2pn\" (UID: \"74049985-d99b-416d-80a0-2f73a2253f79\") " pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.093649 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74049985-d99b-416d-80a0-2f73a2253f79-catalog-content\") pod \"redhat-operators-dh2pn\" (UID: \"74049985-d99b-416d-80a0-2f73a2253f79\") " pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.093690 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74049985-d99b-416d-80a0-2f73a2253f79-utilities\") pod \"redhat-operators-dh2pn\" (UID: \"74049985-d99b-416d-80a0-2f73a2253f79\") " pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.094601 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74049985-d99b-416d-80a0-2f73a2253f79-utilities\") pod \"redhat-operators-dh2pn\" (UID: \"74049985-d99b-416d-80a0-2f73a2253f79\") " pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.095200 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74049985-d99b-416d-80a0-2f73a2253f79-catalog-content\") pod \"redhat-operators-dh2pn\" (UID: \"74049985-d99b-416d-80a0-2f73a2253f79\") " pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.120081 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pxjs\" (UniqueName: \"kubernetes.io/projected/74049985-d99b-416d-80a0-2f73a2253f79-kube-api-access-5pxjs\") pod \"redhat-operators-dh2pn\" 
(UID: \"74049985-d99b-416d-80a0-2f73a2253f79\") " pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.226270 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r5rqb"] Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.274167 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.665111 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dh2pn"] Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.688124 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dh2pn" event={"ID":"74049985-d99b-416d-80a0-2f73a2253f79","Type":"ContainerStarted","Data":"cceff4f58000e953a42bf46e097cc21fb27ef09c4e4b6e5122dc3a23a9883ae6"} Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.695284 4792 generic.go:334] "Generic (PLEG): container finished" podID="e799b8b0-ae24-4880-ad4c-11dc094789f4" containerID="f78629e3414b8d8c990b00ec055ac29dd9280617be4f60227f1204753ffdd8d5" exitCode=0 Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.695991 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r5rqb" event={"ID":"e799b8b0-ae24-4880-ad4c-11dc094789f4","Type":"ContainerDied","Data":"f78629e3414b8d8c990b00ec055ac29dd9280617be4f60227f1204753ffdd8d5"} Sep 29 19:00:57 crc kubenswrapper[4792]: I0929 19:00:57.696021 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r5rqb" event={"ID":"e799b8b0-ae24-4880-ad4c-11dc094789f4","Type":"ContainerStarted","Data":"4ae4c2db37deacff44e960704d7cd10ac0d8fe292c1bcdbf945e1ca70da4e782"} Sep 29 19:00:58 crc kubenswrapper[4792]: I0929 19:00:58.701829 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r5rqb" event={"ID":"e799b8b0-ae24-4880-ad4c-11dc094789f4","Type":"ContainerStarted","Data":"7ac95362e4f6615d4cda7bbd45a440cd879130aff41fa65ecb7751cf7de066dc"} Sep 29 19:00:58 crc kubenswrapper[4792]: I0929 19:00:58.704711 4792 generic.go:334] "Generic (PLEG): container finished" podID="74049985-d99b-416d-80a0-2f73a2253f79" containerID="2357e920462b56f929b4188a691f139b568a79334ac9943f33235e922d0040e1" exitCode=0 Sep 29 19:00:58 crc kubenswrapper[4792]: I0929 19:00:58.704753 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dh2pn" event={"ID":"74049985-d99b-416d-80a0-2f73a2253f79","Type":"ContainerDied","Data":"2357e920462b56f929b4188a691f139b568a79334ac9943f33235e922d0040e1"} Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.099125 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7dj2p"] Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.100300 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.103382 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.115452 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kckjh\" (UniqueName: \"kubernetes.io/projected/a3990f16-15c9-49ab-9e7c-ded88e6f7043-kube-api-access-kckjh\") pod \"certified-operators-7dj2p\" (UID: \"a3990f16-15c9-49ab-9e7c-ded88e6f7043\") " pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.115503 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3990f16-15c9-49ab-9e7c-ded88e6f7043-utilities\") pod \"certified-operators-7dj2p\" (UID: \"a3990f16-15c9-49ab-9e7c-ded88e6f7043\") " pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.115554 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3990f16-15c9-49ab-9e7c-ded88e6f7043-catalog-content\") pod \"certified-operators-7dj2p\" (UID: \"a3990f16-15c9-49ab-9e7c-ded88e6f7043\") " pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.118441 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7dj2p"] Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.216643 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3990f16-15c9-49ab-9e7c-ded88e6f7043-utilities\") pod \"certified-operators-7dj2p\" (UID: \"a3990f16-15c9-49ab-9e7c-ded88e6f7043\") " pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.217361 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kckjh\" (UniqueName: \"kubernetes.io/projected/a3990f16-15c9-49ab-9e7c-ded88e6f7043-kube-api-access-kckjh\") pod \"certified-operators-7dj2p\" (UID: \"a3990f16-15c9-49ab-9e7c-ded88e6f7043\") " pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.217402 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3990f16-15c9-49ab-9e7c-ded88e6f7043-catalog-content\") pod \"certified-operators-7dj2p\" (UID: \"a3990f16-15c9-49ab-9e7c-ded88e6f7043\") " pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.218006 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3990f16-15c9-49ab-9e7c-ded88e6f7043-catalog-content\") pod \"certified-operators-7dj2p\" (UID: \"a3990f16-15c9-49ab-9e7c-ded88e6f7043\") " pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.218177 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3990f16-15c9-49ab-9e7c-ded88e6f7043-utilities\") pod \"certified-operators-7dj2p\" (UID: 
\"a3990f16-15c9-49ab-9e7c-ded88e6f7043\") " pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.237944 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kckjh\" (UniqueName: \"kubernetes.io/projected/a3990f16-15c9-49ab-9e7c-ded88e6f7043-kube-api-access-kckjh\") pod \"certified-operators-7dj2p\" (UID: \"a3990f16-15c9-49ab-9e7c-ded88e6f7043\") " pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.301343 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dbs72"] Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.304800 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.308318 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.318636 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c175ab0e-c63b-4263-bca3-ec28d3165c93-catalog-content\") pod \"community-operators-dbs72\" (UID: \"c175ab0e-c63b-4263-bca3-ec28d3165c93\") " pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.319073 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c175ab0e-c63b-4263-bca3-ec28d3165c93-utilities\") pod \"community-operators-dbs72\" (UID: \"c175ab0e-c63b-4263-bca3-ec28d3165c93\") " pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.319113 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9qwk\" (UniqueName: \"kubernetes.io/projected/c175ab0e-c63b-4263-bca3-ec28d3165c93-kube-api-access-f9qwk\") pod \"community-operators-dbs72\" (UID: \"c175ab0e-c63b-4263-bca3-ec28d3165c93\") " pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.349733 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dbs72"] Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.416269 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.419558 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9qwk\" (UniqueName: \"kubernetes.io/projected/c175ab0e-c63b-4263-bca3-ec28d3165c93-kube-api-access-f9qwk\") pod \"community-operators-dbs72\" (UID: \"c175ab0e-c63b-4263-bca3-ec28d3165c93\") " pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.419675 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c175ab0e-c63b-4263-bca3-ec28d3165c93-utilities\") pod \"community-operators-dbs72\" (UID: \"c175ab0e-c63b-4263-bca3-ec28d3165c93\") " pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.419693 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c175ab0e-c63b-4263-bca3-ec28d3165c93-catalog-content\") pod \"community-operators-dbs72\" (UID: \"c175ab0e-c63b-4263-bca3-ec28d3165c93\") " pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.420359 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c175ab0e-c63b-4263-bca3-ec28d3165c93-catalog-content\") pod \"community-operators-dbs72\" (UID: \"c175ab0e-c63b-4263-bca3-ec28d3165c93\") " pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.421584 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c175ab0e-c63b-4263-bca3-ec28d3165c93-utilities\") pod \"community-operators-dbs72\" (UID: \"c175ab0e-c63b-4263-bca3-ec28d3165c93\") " pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.443221 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9qwk\" (UniqueName: \"kubernetes.io/projected/c175ab0e-c63b-4263-bca3-ec28d3165c93-kube-api-access-f9qwk\") pod \"community-operators-dbs72\" (UID: \"c175ab0e-c63b-4263-bca3-ec28d3165c93\") " pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.656320 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.674583 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7dj2p"] Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.720811 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dh2pn" event={"ID":"74049985-d99b-416d-80a0-2f73a2253f79","Type":"ContainerStarted","Data":"8b16c6bc68d3041120c75a67a1aeff35d441e60cea382ef81826d420a452a325"} Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.729281 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r5rqb" event={"ID":"e799b8b0-ae24-4880-ad4c-11dc094789f4","Type":"ContainerDied","Data":"7ac95362e4f6615d4cda7bbd45a440cd879130aff41fa65ecb7751cf7de066dc"} Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.729232 4792 generic.go:334] "Generic (PLEG): container finished" podID="e799b8b0-ae24-4880-ad4c-11dc094789f4" containerID="7ac95362e4f6615d4cda7bbd45a440cd879130aff41fa65ecb7751cf7de066dc" exitCode=0 Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.729387 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r5rqb" event={"ID":"e799b8b0-ae24-4880-ad4c-11dc094789f4","Type":"ContainerStarted","Data":"d14519659b3791c277c9637a75407517beaed4dc1bf48cc8b1244f10eedd8cb3"} Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.732567 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dj2p" event={"ID":"a3990f16-15c9-49ab-9e7c-ded88e6f7043","Type":"ContainerStarted","Data":"cf8f3e335af049dee92d1845cec7989f1e120072f989a26ba7bd759e53e939c2"} Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.797640 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r5rqb" podStartSLOduration=2.236246882 podStartE2EDuration="3.797528987s" podCreationTimestamp="2025-09-29 19:00:56 +0000 UTC" firstStartedPulling="2025-09-29 19:00:57.700125611 +0000 UTC m=+269.693433007" lastFinishedPulling="2025-09-29 19:00:59.261407716 +0000 UTC m=+271.254715112" observedRunningTime="2025-09-29 19:00:59.788573805 +0000 UTC m=+271.781881201" watchObservedRunningTime="2025-09-29 19:00:59.797528987 +0000 UTC m=+271.790836393" Sep 29 19:00:59 crc kubenswrapper[4792]: I0929 19:00:59.915368 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dbs72"] Sep 29 19:01:00 crc kubenswrapper[4792]: I0929 19:01:00.739628 4792 generic.go:334] "Generic (PLEG): container finished" podID="a3990f16-15c9-49ab-9e7c-ded88e6f7043" containerID="d671ce49694721e8c2b141a784023a84df45e1a9060d0e654e80dd90acc895b1" exitCode=0 Sep 29 19:01:00 crc kubenswrapper[4792]: I0929 19:01:00.739685 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dj2p" event={"ID":"a3990f16-15c9-49ab-9e7c-ded88e6f7043","Type":"ContainerDied","Data":"d671ce49694721e8c2b141a784023a84df45e1a9060d0e654e80dd90acc895b1"} Sep 29 19:01:00 crc kubenswrapper[4792]: I0929 19:01:00.746243 4792 generic.go:334] "Generic (PLEG): container finished" podID="c175ab0e-c63b-4263-bca3-ec28d3165c93" containerID="54bb71be0666866a5bf7b3351eb558300a58bed1db1102cd4d79cb6f2ede7046" exitCode=0 Sep 29 19:01:00 crc kubenswrapper[4792]: I0929 19:01:00.746292 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-dbs72" event={"ID":"c175ab0e-c63b-4263-bca3-ec28d3165c93","Type":"ContainerDied","Data":"54bb71be0666866a5bf7b3351eb558300a58bed1db1102cd4d79cb6f2ede7046"} Sep 29 19:01:00 crc kubenswrapper[4792]: I0929 19:01:00.746332 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dbs72" event={"ID":"c175ab0e-c63b-4263-bca3-ec28d3165c93","Type":"ContainerStarted","Data":"b5e48385d55c76a2906225ac4df3d936711815a1de9574f0e06037ae7b2ccab9"} Sep 29 19:01:00 crc kubenswrapper[4792]: I0929 19:01:00.750157 4792 generic.go:334] "Generic (PLEG): container finished" podID="74049985-d99b-416d-80a0-2f73a2253f79" containerID="8b16c6bc68d3041120c75a67a1aeff35d441e60cea382ef81826d420a452a325" exitCode=0 Sep 29 19:01:00 crc kubenswrapper[4792]: I0929 19:01:00.750363 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dh2pn" event={"ID":"74049985-d99b-416d-80a0-2f73a2253f79","Type":"ContainerDied","Data":"8b16c6bc68d3041120c75a67a1aeff35d441e60cea382ef81826d420a452a325"} Sep 29 19:01:02 crc kubenswrapper[4792]: I0929 19:01:02.760402 4792 generic.go:334] "Generic (PLEG): container finished" podID="a3990f16-15c9-49ab-9e7c-ded88e6f7043" containerID="b1891b73bba7eb71e7f0f672c657c8380d8b9db41239a002a51326b773b1f8e4" exitCode=0 Sep 29 19:01:02 crc kubenswrapper[4792]: I0929 19:01:02.761873 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dj2p" event={"ID":"a3990f16-15c9-49ab-9e7c-ded88e6f7043","Type":"ContainerDied","Data":"b1891b73bba7eb71e7f0f672c657c8380d8b9db41239a002a51326b773b1f8e4"} Sep 29 19:01:02 crc kubenswrapper[4792]: I0929 19:01:02.766950 4792 generic.go:334] "Generic (PLEG): container finished" podID="c175ab0e-c63b-4263-bca3-ec28d3165c93" containerID="0ab637b94f06d724b4273ffa43ad7347d0a35a8e683522e73d091165979ae3a3" exitCode=0 Sep 29 19:01:02 crc kubenswrapper[4792]: I0929 19:01:02.767024 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dbs72" event={"ID":"c175ab0e-c63b-4263-bca3-ec28d3165c93","Type":"ContainerDied","Data":"0ab637b94f06d724b4273ffa43ad7347d0a35a8e683522e73d091165979ae3a3"} Sep 29 19:01:02 crc kubenswrapper[4792]: I0929 19:01:02.774280 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dh2pn" event={"ID":"74049985-d99b-416d-80a0-2f73a2253f79","Type":"ContainerStarted","Data":"80011613633dc2f6197a34ed471199d4d6201ce78988962f3248b80cb3e6227f"} Sep 29 19:01:02 crc kubenswrapper[4792]: I0929 19:01:02.818412 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dh2pn" podStartSLOduration=3.868222636 podStartE2EDuration="6.818398975s" podCreationTimestamp="2025-09-29 19:00:56 +0000 UTC" firstStartedPulling="2025-09-29 19:00:58.706002226 +0000 UTC m=+270.699309622" lastFinishedPulling="2025-09-29 19:01:01.656178565 +0000 UTC m=+273.649485961" observedRunningTime="2025-09-29 19:01:02.817582174 +0000 UTC m=+274.810889570" watchObservedRunningTime="2025-09-29 19:01:02.818398975 +0000 UTC m=+274.811706371" Sep 29 19:01:04 crc kubenswrapper[4792]: I0929 19:01:04.786608 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dj2p" event={"ID":"a3990f16-15c9-49ab-9e7c-ded88e6f7043","Type":"ContainerStarted","Data":"49dc9fdfb06a3f305ea23a68676b67b9e582c24f05356271acfbd45ccd37887c"} Sep 29 19:01:04 crc 
kubenswrapper[4792]: I0929 19:01:04.806223 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7dj2p" podStartSLOduration=2.093746553 podStartE2EDuration="5.806207245s" podCreationTimestamp="2025-09-29 19:00:59 +0000 UTC" firstStartedPulling="2025-09-29 19:01:00.741333345 +0000 UTC m=+272.734640741" lastFinishedPulling="2025-09-29 19:01:04.453793997 +0000 UTC m=+276.447101433" observedRunningTime="2025-09-29 19:01:04.802620212 +0000 UTC m=+276.795927608" watchObservedRunningTime="2025-09-29 19:01:04.806207245 +0000 UTC m=+276.799514641" Sep 29 19:01:06 crc kubenswrapper[4792]: I0929 19:01:06.802186 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dbs72" event={"ID":"c175ab0e-c63b-4263-bca3-ec28d3165c93","Type":"ContainerStarted","Data":"8603c7e294ddeddead0c1b61fb610ed0ec14127085003e732d3acc0d2c4b03d2"} Sep 29 19:01:06 crc kubenswrapper[4792]: I0929 19:01:06.829412 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dbs72" podStartSLOduration=3.712242639 podStartE2EDuration="7.829397411s" podCreationTimestamp="2025-09-29 19:00:59 +0000 UTC" firstStartedPulling="2025-09-29 19:01:00.748052739 +0000 UTC m=+272.741360135" lastFinishedPulling="2025-09-29 19:01:04.865207511 +0000 UTC m=+276.858514907" observedRunningTime="2025-09-29 19:01:06.820970153 +0000 UTC m=+278.814277559" watchObservedRunningTime="2025-09-29 19:01:06.829397411 +0000 UTC m=+278.822704797" Sep 29 19:01:07 crc kubenswrapper[4792]: I0929 19:01:07.028593 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:01:07 crc kubenswrapper[4792]: I0929 19:01:07.028897 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:01:07 crc kubenswrapper[4792]: I0929 19:01:07.063806 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:01:07 crc kubenswrapper[4792]: I0929 19:01:07.275397 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:01:07 crc kubenswrapper[4792]: I0929 19:01:07.275463 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:01:07 crc kubenswrapper[4792]: I0929 19:01:07.335989 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:01:07 crc kubenswrapper[4792]: I0929 19:01:07.853150 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dh2pn" Sep 29 19:01:07 crc kubenswrapper[4792]: I0929 19:01:07.872957 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-r5rqb" Sep 29 19:01:09 crc kubenswrapper[4792]: I0929 19:01:09.417064 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:01:09 crc kubenswrapper[4792]: I0929 19:01:09.417115 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:01:09 crc kubenswrapper[4792]: I0929 19:01:09.455337 4792 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:01:09 crc kubenswrapper[4792]: I0929 19:01:09.656931 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:01:09 crc kubenswrapper[4792]: I0929 19:01:09.657432 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:01:09 crc kubenswrapper[4792]: I0929 19:01:09.693044 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:01:09 crc kubenswrapper[4792]: I0929 19:01:09.852465 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7dj2p" Sep 29 19:01:10 crc kubenswrapper[4792]: I0929 19:01:10.874961 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dbs72" Sep 29 19:02:41 crc kubenswrapper[4792]: I0929 19:02:41.959710 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:02:41 crc kubenswrapper[4792]: I0929 19:02:41.960373 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:03:11 crc kubenswrapper[4792]: I0929 19:03:11.961498 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:03:11 crc kubenswrapper[4792]: I0929 19:03:11.962460 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.454186 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bwqdk"] Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.455243 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.481474 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bwqdk"] Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.586659 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/81895052-4681-486a-b8aa-9082e9856e5d-registry-certificates\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.586736 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/81895052-4681-486a-b8aa-9082e9856e5d-trusted-ca\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.586766 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/81895052-4681-486a-b8aa-9082e9856e5d-bound-sa-token\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.586938 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65tc7\" (UniqueName: \"kubernetes.io/projected/81895052-4681-486a-b8aa-9082e9856e5d-kube-api-access-65tc7\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.586986 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/81895052-4681-486a-b8aa-9082e9856e5d-registry-tls\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.587067 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/81895052-4681-486a-b8aa-9082e9856e5d-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.587096 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/81895052-4681-486a-b8aa-9082e9856e5d-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.587140 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.615493 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.688720 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/81895052-4681-486a-b8aa-9082e9856e5d-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.688789 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/81895052-4681-486a-b8aa-9082e9856e5d-registry-certificates\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.688826 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/81895052-4681-486a-b8aa-9082e9856e5d-trusted-ca\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.688862 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/81895052-4681-486a-b8aa-9082e9856e5d-bound-sa-token\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.688897 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65tc7\" (UniqueName: \"kubernetes.io/projected/81895052-4681-486a-b8aa-9082e9856e5d-kube-api-access-65tc7\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.688931 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/81895052-4681-486a-b8aa-9082e9856e5d-registry-tls\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.688977 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/81895052-4681-486a-b8aa-9082e9856e5d-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.689407 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/81895052-4681-486a-b8aa-9082e9856e5d-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.690478 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/81895052-4681-486a-b8aa-9082e9856e5d-trusted-ca\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.690575 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/81895052-4681-486a-b8aa-9082e9856e5d-registry-certificates\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.694556 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/81895052-4681-486a-b8aa-9082e9856e5d-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.694567 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/81895052-4681-486a-b8aa-9082e9856e5d-registry-tls\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.705826 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/81895052-4681-486a-b8aa-9082e9856e5d-bound-sa-token\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.706013 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65tc7\" (UniqueName: \"kubernetes.io/projected/81895052-4681-486a-b8aa-9082e9856e5d-kube-api-access-65tc7\") pod \"image-registry-66df7c8f76-bwqdk\" (UID: \"81895052-4681-486a-b8aa-9082e9856e5d\") " pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.774902 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:22 crc kubenswrapper[4792]: I0929 19:03:22.952416 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bwqdk"] Sep 29 19:03:23 crc kubenswrapper[4792]: I0929 19:03:23.654223 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" event={"ID":"81895052-4681-486a-b8aa-9082e9856e5d","Type":"ContainerStarted","Data":"bf3138b825977073fd9136773b440b8807249c58dd86d7772f741145379b1867"} Sep 29 19:03:23 crc kubenswrapper[4792]: I0929 19:03:23.654544 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" event={"ID":"81895052-4681-486a-b8aa-9082e9856e5d","Type":"ContainerStarted","Data":"cef965cffe1376f33973a0680512a6d9d31ee46ae72a39625cbf337097c8c553"} Sep 29 19:03:23 crc kubenswrapper[4792]: I0929 19:03:23.655523 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:23 crc kubenswrapper[4792]: I0929 19:03:23.676998 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" podStartSLOduration=1.676978949 podStartE2EDuration="1.676978949s" podCreationTimestamp="2025-09-29 19:03:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:03:23.675675963 +0000 UTC m=+415.668983379" watchObservedRunningTime="2025-09-29 19:03:23.676978949 +0000 UTC m=+415.670286345" Sep 29 19:03:41 crc kubenswrapper[4792]: I0929 19:03:41.959560 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:03:41 crc kubenswrapper[4792]: I0929 19:03:41.960111 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:03:41 crc kubenswrapper[4792]: I0929 19:03:41.960176 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:03:41 crc kubenswrapper[4792]: I0929 19:03:41.960918 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7d8c0b7ec3035efa9edf9d9b2ba12dabade2b1415013394067aca0438b434980"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:03:41 crc kubenswrapper[4792]: I0929 19:03:41.960980 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://7d8c0b7ec3035efa9edf9d9b2ba12dabade2b1415013394067aca0438b434980" gracePeriod=600 Sep 29 19:03:42 crc kubenswrapper[4792]: I0929 
19:03:42.775020 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="7d8c0b7ec3035efa9edf9d9b2ba12dabade2b1415013394067aca0438b434980" exitCode=0 Sep 29 19:03:42 crc kubenswrapper[4792]: I0929 19:03:42.775101 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"7d8c0b7ec3035efa9edf9d9b2ba12dabade2b1415013394067aca0438b434980"} Sep 29 19:03:42 crc kubenswrapper[4792]: I0929 19:03:42.775753 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"28e9b336e995bb00d35a92fadb8e3b916142bf2b43240549bb14e32ddcc21015"} Sep 29 19:03:42 crc kubenswrapper[4792]: I0929 19:03:42.775789 4792 scope.go:117] "RemoveContainer" containerID="305645f1f10b20984067c3d0d32bc9a5936e191faecff2bb494be005fc471c65" Sep 29 19:03:42 crc kubenswrapper[4792]: I0929 19:03:42.784231 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-bwqdk" Sep 29 19:03:42 crc kubenswrapper[4792]: I0929 19:03:42.837222 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fnpzd"] Sep 29 19:04:07 crc kubenswrapper[4792]: I0929 19:04:07.886242 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" podUID="b3ccc1f5-4945-4a14-8f84-363683bbd575" containerName="registry" containerID="cri-o://e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0" gracePeriod=30 Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.298370 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.447806 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-certificates\") pod \"b3ccc1f5-4945-4a14-8f84-363683bbd575\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.447865 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw2tk\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-kube-api-access-rw2tk\") pod \"b3ccc1f5-4945-4a14-8f84-363683bbd575\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.447978 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"b3ccc1f5-4945-4a14-8f84-363683bbd575\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.447998 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-bound-sa-token\") pod \"b3ccc1f5-4945-4a14-8f84-363683bbd575\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.448060 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-tls\") pod \"b3ccc1f5-4945-4a14-8f84-363683bbd575\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.448078 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3ccc1f5-4945-4a14-8f84-363683bbd575-installation-pull-secrets\") pod \"b3ccc1f5-4945-4a14-8f84-363683bbd575\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.448098 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3ccc1f5-4945-4a14-8f84-363683bbd575-ca-trust-extracted\") pod \"b3ccc1f5-4945-4a14-8f84-363683bbd575\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.448128 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-trusted-ca\") pod \"b3ccc1f5-4945-4a14-8f84-363683bbd575\" (UID: \"b3ccc1f5-4945-4a14-8f84-363683bbd575\") " Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.448816 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "b3ccc1f5-4945-4a14-8f84-363683bbd575" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.449333 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "b3ccc1f5-4945-4a14-8f84-363683bbd575" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.454813 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ccc1f5-4945-4a14-8f84-363683bbd575-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "b3ccc1f5-4945-4a14-8f84-363683bbd575" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.454910 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "b3ccc1f5-4945-4a14-8f84-363683bbd575" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.455512 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-kube-api-access-rw2tk" (OuterVolumeSpecName: "kube-api-access-rw2tk") pod "b3ccc1f5-4945-4a14-8f84-363683bbd575" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575"). InnerVolumeSpecName "kube-api-access-rw2tk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.458356 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "b3ccc1f5-4945-4a14-8f84-363683bbd575" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.466984 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3ccc1f5-4945-4a14-8f84-363683bbd575-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "b3ccc1f5-4945-4a14-8f84-363683bbd575" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.467601 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "b3ccc1f5-4945-4a14-8f84-363683bbd575" (UID: "b3ccc1f5-4945-4a14-8f84-363683bbd575"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.549024 4792 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.549068 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw2tk\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-kube-api-access-rw2tk\") on node \"crc\" DevicePath \"\"" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.549080 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.549091 4792 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3ccc1f5-4945-4a14-8f84-363683bbd575-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.549106 4792 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3ccc1f5-4945-4a14-8f84-363683bbd575-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.549117 4792 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3ccc1f5-4945-4a14-8f84-363683bbd575-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.549126 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3ccc1f5-4945-4a14-8f84-363683bbd575-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.922632 4792 generic.go:334] "Generic (PLEG): container finished" podID="b3ccc1f5-4945-4a14-8f84-363683bbd575" containerID="e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0" exitCode=0 Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.922679 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" event={"ID":"b3ccc1f5-4945-4a14-8f84-363683bbd575","Type":"ContainerDied","Data":"e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0"} Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.922704 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.922727 4792 scope.go:117] "RemoveContainer" containerID="e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.922714 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-fnpzd" event={"ID":"b3ccc1f5-4945-4a14-8f84-363683bbd575","Type":"ContainerDied","Data":"0d4bbd00d0c88324d507a00824f6176a15191e53cc41f6a5259cf4fb5796afaf"} Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.939559 4792 scope.go:117] "RemoveContainer" containerID="e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0" Sep 29 19:04:08 crc kubenswrapper[4792]: E0929 19:04:08.940051 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0\": container with ID starting with e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0 not found: ID does not exist" containerID="e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.940149 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0"} err="failed to get container status \"e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0\": rpc error: code = NotFound desc = could not find container \"e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0\": container with ID starting with e4638c54854df5d2f32ada99715b1c9b7f94c14a4ddf883b21181324cdd8d2d0 not found: ID does not exist" Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.954490 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fnpzd"] Sep 29 19:04:08 crc kubenswrapper[4792]: I0929 19:04:08.957558 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fnpzd"] Sep 29 19:04:09 crc kubenswrapper[4792]: I0929 19:04:09.021630 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3ccc1f5-4945-4a14-8f84-363683bbd575" path="/var/lib/kubelet/pods/b3ccc1f5-4945-4a14-8f84-363683bbd575/volumes" Sep 29 19:06:11 crc kubenswrapper[4792]: I0929 19:06:11.960108 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:06:11 crc kubenswrapper[4792]: I0929 19:06:11.960784 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:06:41 crc kubenswrapper[4792]: I0929 19:06:41.960391 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 
29 19:06:41 crc kubenswrapper[4792]: I0929 19:06:41.960964 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:07:11 crc kubenswrapper[4792]: I0929 19:07:11.959869 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:07:11 crc kubenswrapper[4792]: I0929 19:07:11.960619 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:07:11 crc kubenswrapper[4792]: I0929 19:07:11.960672 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:07:11 crc kubenswrapper[4792]: I0929 19:07:11.961279 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"28e9b336e995bb00d35a92fadb8e3b916142bf2b43240549bb14e32ddcc21015"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:07:11 crc kubenswrapper[4792]: I0929 19:07:11.961334 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://28e9b336e995bb00d35a92fadb8e3b916142bf2b43240549bb14e32ddcc21015" gracePeriod=600 Sep 29 19:07:12 crc kubenswrapper[4792]: I0929 19:07:12.903611 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="28e9b336e995bb00d35a92fadb8e3b916142bf2b43240549bb14e32ddcc21015" exitCode=0 Sep 29 19:07:12 crc kubenswrapper[4792]: I0929 19:07:12.903700 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"28e9b336e995bb00d35a92fadb8e3b916142bf2b43240549bb14e32ddcc21015"} Sep 29 19:07:12 crc kubenswrapper[4792]: I0929 19:07:12.903923 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"487246f4f6005415a540bc4c228e6bec5b9bf5f447044923f1e106cf7a0cba67"} Sep 29 19:07:12 crc kubenswrapper[4792]: I0929 19:07:12.903941 4792 scope.go:117] "RemoveContainer" containerID="7d8c0b7ec3035efa9edf9d9b2ba12dabade2b1415013394067aca0438b434980" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.663089 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-pxgvr"] Sep 29 19:08:00 crc kubenswrapper[4792]: E0929 19:08:00.663832 4792 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="b3ccc1f5-4945-4a14-8f84-363683bbd575" containerName="registry" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.663857 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ccc1f5-4945-4a14-8f84-363683bbd575" containerName="registry" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.663949 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3ccc1f5-4945-4a14-8f84-363683bbd575" containerName="registry" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.664341 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-pxgvr" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.666178 4792 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-j9q2n" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.666299 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.666386 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.674607 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-pxgvr"] Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.697784 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-sb29r"] Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.698615 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-sb29r" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.701891 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-mx55q"] Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.702631 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-mx55q" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.705520 4792 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-lpc64" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.708331 4792 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-mmmrp" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.734069 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-sb29r"] Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.737916 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-mx55q"] Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.751531 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrtvh\" (UniqueName: \"kubernetes.io/projected/4bdb9002-6a61-4c32-a32e-3a76cc24a38e-kube-api-access-zrtvh\") pod \"cert-manager-cainjector-7f985d654d-pxgvr\" (UID: \"4bdb9002-6a61-4c32-a32e-3a76cc24a38e\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-pxgvr" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.751627 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjxrp\" (UniqueName: \"kubernetes.io/projected/b48f55d3-c9a5-4973-b233-f59ced6a17e6-kube-api-access-jjxrp\") pod \"cert-manager-webhook-5655c58dd6-mx55q\" (UID: \"b48f55d3-c9a5-4973-b233-f59ced6a17e6\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-mx55q" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.751739 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn64l\" (UniqueName: \"kubernetes.io/projected/d603ca4a-b40c-439f-b7ed-09a279e9d727-kube-api-access-gn64l\") pod \"cert-manager-5b446d88c5-sb29r\" (UID: \"d603ca4a-b40c-439f-b7ed-09a279e9d727\") " pod="cert-manager/cert-manager-5b446d88c5-sb29r" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.852812 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn64l\" (UniqueName: \"kubernetes.io/projected/d603ca4a-b40c-439f-b7ed-09a279e9d727-kube-api-access-gn64l\") pod \"cert-manager-5b446d88c5-sb29r\" (UID: \"d603ca4a-b40c-439f-b7ed-09a279e9d727\") " pod="cert-manager/cert-manager-5b446d88c5-sb29r" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.852914 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrtvh\" (UniqueName: \"kubernetes.io/projected/4bdb9002-6a61-4c32-a32e-3a76cc24a38e-kube-api-access-zrtvh\") pod \"cert-manager-cainjector-7f985d654d-pxgvr\" (UID: \"4bdb9002-6a61-4c32-a32e-3a76cc24a38e\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-pxgvr" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.852949 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjxrp\" (UniqueName: \"kubernetes.io/projected/b48f55d3-c9a5-4973-b233-f59ced6a17e6-kube-api-access-jjxrp\") pod \"cert-manager-webhook-5655c58dd6-mx55q\" (UID: \"b48f55d3-c9a5-4973-b233-f59ced6a17e6\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-mx55q" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.870382 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjxrp\" (UniqueName: 
\"kubernetes.io/projected/b48f55d3-c9a5-4973-b233-f59ced6a17e6-kube-api-access-jjxrp\") pod \"cert-manager-webhook-5655c58dd6-mx55q\" (UID: \"b48f55d3-c9a5-4973-b233-f59ced6a17e6\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-mx55q" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.872441 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrtvh\" (UniqueName: \"kubernetes.io/projected/4bdb9002-6a61-4c32-a32e-3a76cc24a38e-kube-api-access-zrtvh\") pod \"cert-manager-cainjector-7f985d654d-pxgvr\" (UID: \"4bdb9002-6a61-4c32-a32e-3a76cc24a38e\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-pxgvr" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.872488 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn64l\" (UniqueName: \"kubernetes.io/projected/d603ca4a-b40c-439f-b7ed-09a279e9d727-kube-api-access-gn64l\") pod \"cert-manager-5b446d88c5-sb29r\" (UID: \"d603ca4a-b40c-439f-b7ed-09a279e9d727\") " pod="cert-manager/cert-manager-5b446d88c5-sb29r" Sep 29 19:08:00 crc kubenswrapper[4792]: I0929 19:08:00.984975 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-pxgvr" Sep 29 19:08:01 crc kubenswrapper[4792]: I0929 19:08:01.015582 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-sb29r" Sep 29 19:08:01 crc kubenswrapper[4792]: I0929 19:08:01.025016 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-mx55q" Sep 29 19:08:01 crc kubenswrapper[4792]: I0929 19:08:01.203671 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-pxgvr"] Sep 29 19:08:01 crc kubenswrapper[4792]: I0929 19:08:01.213474 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:08:01 crc kubenswrapper[4792]: I0929 19:08:01.486601 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-sb29r"] Sep 29 19:08:01 crc kubenswrapper[4792]: I0929 19:08:01.491980 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-mx55q"] Sep 29 19:08:01 crc kubenswrapper[4792]: W0929 19:08:01.494136 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd603ca4a_b40c_439f_b7ed_09a279e9d727.slice/crio-83460cff59a2b45b7de945e45b9bcfdee29f68b03ec714f42fbfc1737848e977 WatchSource:0}: Error finding container 83460cff59a2b45b7de945e45b9bcfdee29f68b03ec714f42fbfc1737848e977: Status 404 returned error can't find the container with id 83460cff59a2b45b7de945e45b9bcfdee29f68b03ec714f42fbfc1737848e977 Sep 29 19:08:01 crc kubenswrapper[4792]: W0929 19:08:01.496311 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb48f55d3_c9a5_4973_b233_f59ced6a17e6.slice/crio-6183581146eb0a5fef09375e445df572735148199e1def0caf2544ae2c31aca1 WatchSource:0}: Error finding container 6183581146eb0a5fef09375e445df572735148199e1def0caf2544ae2c31aca1: Status 404 returned error can't find the container with id 6183581146eb0a5fef09375e445df572735148199e1def0caf2544ae2c31aca1 Sep 29 19:08:02 crc kubenswrapper[4792]: I0929 19:08:02.153346 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="cert-manager/cert-manager-5b446d88c5-sb29r" event={"ID":"d603ca4a-b40c-439f-b7ed-09a279e9d727","Type":"ContainerStarted","Data":"83460cff59a2b45b7de945e45b9bcfdee29f68b03ec714f42fbfc1737848e977"} Sep 29 19:08:02 crc kubenswrapper[4792]: I0929 19:08:02.154872 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-pxgvr" event={"ID":"4bdb9002-6a61-4c32-a32e-3a76cc24a38e","Type":"ContainerStarted","Data":"d58ff033da633c18ad6b80e3bc30e27ed380e7cb271f8d0741d129ce904112c7"} Sep 29 19:08:02 crc kubenswrapper[4792]: I0929 19:08:02.155444 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-mx55q" event={"ID":"b48f55d3-c9a5-4973-b233-f59ced6a17e6","Type":"ContainerStarted","Data":"6183581146eb0a5fef09375e445df572735148199e1def0caf2544ae2c31aca1"} Sep 29 19:08:05 crc kubenswrapper[4792]: I0929 19:08:05.174429 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-mx55q" event={"ID":"b48f55d3-c9a5-4973-b233-f59ced6a17e6","Type":"ContainerStarted","Data":"e47eabe5e8d0e09e05603411f665087079419cb1dabc1e58667f8b836b0eefcb"} Sep 29 19:08:05 crc kubenswrapper[4792]: I0929 19:08:05.175386 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-mx55q" Sep 29 19:08:05 crc kubenswrapper[4792]: I0929 19:08:05.177202 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-sb29r" event={"ID":"d603ca4a-b40c-439f-b7ed-09a279e9d727","Type":"ContainerStarted","Data":"2482657890e8731eb6703a0813063d0e81a79f1bb27a66f25885ffb68564c377"} Sep 29 19:08:05 crc kubenswrapper[4792]: I0929 19:08:05.179473 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-pxgvr" event={"ID":"4bdb9002-6a61-4c32-a32e-3a76cc24a38e","Type":"ContainerStarted","Data":"3687f8a2cc9fea22d53bcc5c71c104a958185316528482193f481c32995c921f"} Sep 29 19:08:05 crc kubenswrapper[4792]: I0929 19:08:05.192706 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-mx55q" podStartSLOduration=1.943414304 podStartE2EDuration="5.192686658s" podCreationTimestamp="2025-09-29 19:08:00 +0000 UTC" firstStartedPulling="2025-09-29 19:08:01.498678146 +0000 UTC m=+693.491985542" lastFinishedPulling="2025-09-29 19:08:04.7479505 +0000 UTC m=+696.741257896" observedRunningTime="2025-09-29 19:08:05.187591828 +0000 UTC m=+697.180899234" watchObservedRunningTime="2025-09-29 19:08:05.192686658 +0000 UTC m=+697.185994074" Sep 29 19:08:05 crc kubenswrapper[4792]: I0929 19:08:05.200636 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-pxgvr" podStartSLOduration=1.6661307029999999 podStartE2EDuration="5.200615014s" podCreationTimestamp="2025-09-29 19:08:00 +0000 UTC" firstStartedPulling="2025-09-29 19:08:01.213262243 +0000 UTC m=+693.206569639" lastFinishedPulling="2025-09-29 19:08:04.747746554 +0000 UTC m=+696.741053950" observedRunningTime="2025-09-29 19:08:05.199634617 +0000 UTC m=+697.192942034" watchObservedRunningTime="2025-09-29 19:08:05.200615014 +0000 UTC m=+697.193922410" Sep 29 19:08:05 crc kubenswrapper[4792]: I0929 19:08:05.215490 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-sb29r" podStartSLOduration=1.897322093 podStartE2EDuration="5.21546981s" 
podCreationTimestamp="2025-09-29 19:08:00 +0000 UTC" firstStartedPulling="2025-09-29 19:08:01.496061284 +0000 UTC m=+693.489368680" lastFinishedPulling="2025-09-29 19:08:04.814208981 +0000 UTC m=+696.807516397" observedRunningTime="2025-09-29 19:08:05.212719715 +0000 UTC m=+697.206027121" watchObservedRunningTime="2025-09-29 19:08:05.21546981 +0000 UTC m=+697.208777206" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.028253 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-mx55q" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.115664 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hr4cm"] Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.116600 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovn-controller" containerID="cri-o://c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925" gracePeriod=30 Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.116686 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="kube-rbac-proxy-node" containerID="cri-o://7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173" gracePeriod=30 Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.116702 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc" gracePeriod=30 Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.116731 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="northd" containerID="cri-o://a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1" gracePeriod=30 Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.116736 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="sbdb" containerID="cri-o://7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08" gracePeriod=30 Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.116731 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovn-acl-logging" containerID="cri-o://7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627" gracePeriod=30 Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.116648 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="nbdb" containerID="cri-o://3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be" gracePeriod=30 Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.171483 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" 
containerName="ovnkube-controller" containerID="cri-o://7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86" gracePeriod=30 Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.211055 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5hwvp_100876d3-2539-47f1-91fa-0f91456ccac1/kube-multus/2.log" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.211559 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5hwvp_100876d3-2539-47f1-91fa-0f91456ccac1/kube-multus/1.log" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.211611 4792 generic.go:334] "Generic (PLEG): container finished" podID="100876d3-2539-47f1-91fa-0f91456ccac1" containerID="ce5b36817c4429539a535b70ada4c3b33d548c1cffdb995523a6276f42da0607" exitCode=2 Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.211641 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5hwvp" event={"ID":"100876d3-2539-47f1-91fa-0f91456ccac1","Type":"ContainerDied","Data":"ce5b36817c4429539a535b70ada4c3b33d548c1cffdb995523a6276f42da0607"} Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.211674 4792 scope.go:117] "RemoveContainer" containerID="e36db1c84da57d66a764493ff741136d4bec9e23eb8f9f9517fd82dd9f829e8c" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.212081 4792 scope.go:117] "RemoveContainer" containerID="ce5b36817c4429539a535b70ada4c3b33d548c1cffdb995523a6276f42da0607" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.212317 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5hwvp_openshift-multus(100876d3-2539-47f1-91fa-0f91456ccac1)\"" pod="openshift-multus/multus-5hwvp" podUID="100876d3-2539-47f1-91fa-0f91456ccac1" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.451700 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/3.log" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.454036 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovn-acl-logging/0.log" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.454551 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovn-controller/0.log" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.455082 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.515634 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-bs7kw"] Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516094 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516117 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516136 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="northd" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516145 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="northd" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516165 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovn-acl-logging" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516174 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovn-acl-logging" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516191 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516201 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516222 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516231 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516247 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="kubecfg-setup" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516259 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="kubecfg-setup" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516267 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="sbdb" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516275 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="sbdb" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516284 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="nbdb" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516291 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="nbdb" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516308 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" 
containerName="ovn-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516315 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovn-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516324 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="kube-rbac-proxy-node" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516332 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="kube-rbac-proxy-node" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.516352 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516360 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516629 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516642 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovn-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516658 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="northd" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516672 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516682 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516697 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="sbdb" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516712 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516721 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516738 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovn-acl-logging" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516753 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="kube-rbac-proxy-node" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.516770 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="nbdb" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.524398 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.524432 
4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: E0929 19:08:11.524454 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.524461 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.524670 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerName="ovnkube-controller" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.527523 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596103 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-ovn\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596144 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-var-lib-openvswitch\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596166 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-env-overrides\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596205 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-slash\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596250 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-systemd\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596242 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596284 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596345 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-slash" (OuterVolumeSpecName: "host-slash") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596354 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-kubelet\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596388 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596420 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-log-socket\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596442 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-log-socket" (OuterVolumeSpecName: "log-socket") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596668 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596798 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-config\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596824 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-systemd-units\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596899 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-netd\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596931 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-var-lib-cni-networks-ovn-kubernetes\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596943 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596958 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-openvswitch\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.596984 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-node-log\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597010 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72xxv\" (UniqueName: \"kubernetes.io/projected/716c5fdd-0e02-4066-9210-93d805b6fe81-kube-api-access-72xxv\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597037 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-script-lib\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597066 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-ovn-kubernetes\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597087 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-etc-openvswitch\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597105 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-netns\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597210 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/716c5fdd-0e02-4066-9210-93d805b6fe81-ovn-node-metrics-cert\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597237 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-bin\") pod \"716c5fdd-0e02-4066-9210-93d805b6fe81\" (UID: \"716c5fdd-0e02-4066-9210-93d805b6fe81\") " Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597246 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod 
"716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597425 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-slash\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597453 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-systemd-units\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597480 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-kubelet\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597502 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/98a059b7-9711-49f3-8545-b6027d15e657-ovnkube-config\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597509 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597531 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-run-openvswitch\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597538 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597552 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-run-ovn\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597559 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597577 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-node-log" (OuterVolumeSpecName: "node-log") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597577 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/98a059b7-9711-49f3-8545-b6027d15e657-ovn-node-metrics-cert\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597598 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597606 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-var-lib-openvswitch\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597613 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597616 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597709 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597629 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-cni-netd\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597744 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-node-log\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597768 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597790 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-log-socket\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597811 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-etc-openvswitch\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597837 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-cni-bin\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597878 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-run-netns\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597908 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" 
(UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-run-systemd\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597928 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/98a059b7-9711-49f3-8545-b6027d15e657-env-overrides\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597960 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-run-ovn-kubernetes\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597983 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/98a059b7-9711-49f3-8545-b6027d15e657-ovnkube-script-lib\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.597996 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598019 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtwzt\" (UniqueName: \"kubernetes.io/projected/98a059b7-9711-49f3-8545-b6027d15e657-kube-api-access-jtwzt\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598080 4792 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598093 4792 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598105 4792 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-slash\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598117 4792 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598126 4792 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-log-socket\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598136 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598145 4792 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598153 4792 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598162 4792 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598171 4792 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598195 4792 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-node-log\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598203 4792 reconciler_common.go:293] "Volume 
detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/716c5fdd-0e02-4066-9210-93d805b6fe81-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598210 4792 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598220 4792 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598228 4792 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598236 4792 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.598243 4792 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.600970 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/716c5fdd-0e02-4066-9210-93d805b6fe81-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.601171 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/716c5fdd-0e02-4066-9210-93d805b6fe81-kube-api-access-72xxv" (OuterVolumeSpecName: "kube-api-access-72xxv") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "kube-api-access-72xxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.607800 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "716c5fdd-0e02-4066-9210-93d805b6fe81" (UID: "716c5fdd-0e02-4066-9210-93d805b6fe81"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699211 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-slash\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699254 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-systemd-units\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699280 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-kubelet\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699295 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/98a059b7-9711-49f3-8545-b6027d15e657-ovnkube-config\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699316 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-run-openvswitch\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699331 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-run-ovn\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699347 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/98a059b7-9711-49f3-8545-b6027d15e657-ovn-node-metrics-cert\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699352 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-kubelet\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699365 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-var-lib-openvswitch\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 
19:08:11.699382 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-cni-netd\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699397 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-systemd-units\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699328 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-slash\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699406 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-node-log\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699427 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699443 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-etc-openvswitch\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699458 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-log-socket\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-cni-bin\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699491 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-run-netns\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699510 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"run-systemd\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-run-systemd\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699526 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/98a059b7-9711-49f3-8545-b6027d15e657-env-overrides\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699548 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-run-ovn-kubernetes\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699566 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/98a059b7-9711-49f3-8545-b6027d15e657-ovnkube-script-lib\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699612 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtwzt\" (UniqueName: \"kubernetes.io/projected/98a059b7-9711-49f3-8545-b6027d15e657-kube-api-access-jtwzt\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699647 4792 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/716c5fdd-0e02-4066-9210-93d805b6fe81-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699658 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72xxv\" (UniqueName: \"kubernetes.io/projected/716c5fdd-0e02-4066-9210-93d805b6fe81-kube-api-access-72xxv\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.699667 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/716c5fdd-0e02-4066-9210-93d805b6fe81-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700066 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/98a059b7-9711-49f3-8545-b6027d15e657-ovnkube-config\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700101 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-run-openvswitch\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700125 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-run-ovn\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700145 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-log-socket\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700163 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-var-lib-openvswitch\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700183 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-cni-netd\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700203 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-node-log\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700223 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-etc-openvswitch\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700245 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-run-systemd\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700272 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700289 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-cni-bin\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700316 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-run-netns\") pod \"ovnkube-node-bs7kw\" (UID: 
\"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700339 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/98a059b7-9711-49f3-8545-b6027d15e657-host-run-ovn-kubernetes\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700752 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/98a059b7-9711-49f3-8545-b6027d15e657-env-overrides\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.700933 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/98a059b7-9711-49f3-8545-b6027d15e657-ovnkube-script-lib\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.702200 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/98a059b7-9711-49f3-8545-b6027d15e657-ovn-node-metrics-cert\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.715455 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtwzt\" (UniqueName: \"kubernetes.io/projected/98a059b7-9711-49f3-8545-b6027d15e657-kube-api-access-jtwzt\") pod \"ovnkube-node-bs7kw\" (UID: \"98a059b7-9711-49f3-8545-b6027d15e657\") " pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:11 crc kubenswrapper[4792]: I0929 19:08:11.840546 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.218579 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovnkube-controller/3.log" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.220806 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovn-acl-logging/0.log" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221275 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hr4cm_716c5fdd-0e02-4066-9210-93d805b6fe81/ovn-controller/0.log" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221585 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86" exitCode=0 Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221629 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08" exitCode=0 Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221637 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be" exitCode=0 Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221644 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1" exitCode=0 Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221650 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc" exitCode=0 Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221656 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173" exitCode=0 Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221661 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627" exitCode=143 Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221667 4792 generic.go:334] "Generic (PLEG): container finished" podID="716c5fdd-0e02-4066-9210-93d805b6fe81" containerID="c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925" exitCode=143 Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221813 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221861 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221880 4792 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221893 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221905 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221918 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221931 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221944 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221952 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221959 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221932 4792 scope.go:117] "RemoveContainer" containerID="7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222006 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.221966 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222103 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222113 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222121 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222130 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222141 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222154 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222163 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222170 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222176 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222183 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222190 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222197 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222203 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222209 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222215 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222225 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222235 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222243 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222249 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222254 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222261 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222267 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222273 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222278 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222301 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222307 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222316 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-hr4cm" event={"ID":"716c5fdd-0e02-4066-9210-93d805b6fe81","Type":"ContainerDied","Data":"cf070e273fcce7f67e8404720b4fc0f985e467b495a02b9f70058a2809243b6f"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222327 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222336 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222343 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222350 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222357 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222364 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222370 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222377 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222383 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.222390 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.226623 4792 generic.go:334] "Generic (PLEG): container finished" podID="98a059b7-9711-49f3-8545-b6027d15e657" containerID="bc9464ad4644357bf8943f5558c432645dd8ef3dc56d1aec2373e59d5a215345" exitCode=0 Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.226725 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" event={"ID":"98a059b7-9711-49f3-8545-b6027d15e657","Type":"ContainerDied","Data":"bc9464ad4644357bf8943f5558c432645dd8ef3dc56d1aec2373e59d5a215345"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.226766 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" 
event={"ID":"98a059b7-9711-49f3-8545-b6027d15e657","Type":"ContainerStarted","Data":"19c777d6ec9ef1bb322524f5ba2a37a2553a323e3db63e2df2538c4d82e8748b"} Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.231216 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5hwvp_100876d3-2539-47f1-91fa-0f91456ccac1/kube-multus/2.log" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.245938 4792 scope.go:117] "RemoveContainer" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.314470 4792 scope.go:117] "RemoveContainer" containerID="7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.356178 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hr4cm"] Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.358596 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hr4cm"] Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.363554 4792 scope.go:117] "RemoveContainer" containerID="3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.382996 4792 scope.go:117] "RemoveContainer" containerID="a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.399923 4792 scope.go:117] "RemoveContainer" containerID="46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.445237 4792 scope.go:117] "RemoveContainer" containerID="7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.459636 4792 scope.go:117] "RemoveContainer" containerID="7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.491112 4792 scope.go:117] "RemoveContainer" containerID="c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.518722 4792 scope.go:117] "RemoveContainer" containerID="d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.544143 4792 scope.go:117] "RemoveContainer" containerID="7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86" Sep 29 19:08:12 crc kubenswrapper[4792]: E0929 19:08:12.545590 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": container with ID starting with 7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86 not found: ID does not exist" containerID="7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.545636 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86"} err="failed to get container status \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": rpc error: code = NotFound desc = could not find container \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": container with ID starting with 7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86 not found: ID does not exist" Sep 
29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.545664 4792 scope.go:117] "RemoveContainer" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386" Sep 29 19:08:12 crc kubenswrapper[4792]: E0929 19:08:12.546000 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\": container with ID starting with c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386 not found: ID does not exist" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.546023 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"} err="failed to get container status \"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\": rpc error: code = NotFound desc = could not find container \"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\": container with ID starting with c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.546037 4792 scope.go:117] "RemoveContainer" containerID="7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08" Sep 29 19:08:12 crc kubenswrapper[4792]: E0929 19:08:12.546631 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\": container with ID starting with 7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08 not found: ID does not exist" containerID="7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.546674 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08"} err="failed to get container status \"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\": rpc error: code = NotFound desc = could not find container \"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\": container with ID starting with 7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.546699 4792 scope.go:117] "RemoveContainer" containerID="3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be" Sep 29 19:08:12 crc kubenswrapper[4792]: E0929 19:08:12.547067 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\": container with ID starting with 3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be not found: ID does not exist" containerID="3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.547094 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be"} err="failed to get container status \"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\": rpc error: code = NotFound desc = could not find container 
\"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\": container with ID starting with 3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.547108 4792 scope.go:117] "RemoveContainer" containerID="a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1" Sep 29 19:08:12 crc kubenswrapper[4792]: E0929 19:08:12.548042 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\": container with ID starting with a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1 not found: ID does not exist" containerID="a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.548068 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1"} err="failed to get container status \"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\": rpc error: code = NotFound desc = could not find container \"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\": container with ID starting with a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.548082 4792 scope.go:117] "RemoveContainer" containerID="46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc" Sep 29 19:08:12 crc kubenswrapper[4792]: E0929 19:08:12.548352 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\": container with ID starting with 46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc not found: ID does not exist" containerID="46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.548372 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc"} err="failed to get container status \"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\": rpc error: code = NotFound desc = could not find container \"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\": container with ID starting with 46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.548385 4792 scope.go:117] "RemoveContainer" containerID="7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173" Sep 29 19:08:12 crc kubenswrapper[4792]: E0929 19:08:12.548594 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\": container with ID starting with 7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173 not found: ID does not exist" containerID="7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.548610 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173"} 
err="failed to get container status \"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\": rpc error: code = NotFound desc = could not find container \"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\": container with ID starting with 7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.548624 4792 scope.go:117] "RemoveContainer" containerID="7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627" Sep 29 19:08:12 crc kubenswrapper[4792]: E0929 19:08:12.549192 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\": container with ID starting with 7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627 not found: ID does not exist" containerID="7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.549215 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627"} err="failed to get container status \"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\": rpc error: code = NotFound desc = could not find container \"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\": container with ID starting with 7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.549226 4792 scope.go:117] "RemoveContainer" containerID="c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925" Sep 29 19:08:12 crc kubenswrapper[4792]: E0929 19:08:12.549451 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\": container with ID starting with c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925 not found: ID does not exist" containerID="c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.549472 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925"} err="failed to get container status \"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\": rpc error: code = NotFound desc = could not find container \"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\": container with ID starting with c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.549483 4792 scope.go:117] "RemoveContainer" containerID="d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820" Sep 29 19:08:12 crc kubenswrapper[4792]: E0929 19:08:12.549731 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\": container with ID starting with d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820 not found: ID does not exist" containerID="d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.549754 4792 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820"} err="failed to get container status \"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\": rpc error: code = NotFound desc = could not find container \"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\": container with ID starting with d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.549770 4792 scope.go:117] "RemoveContainer" containerID="7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.550070 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86"} err="failed to get container status \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": rpc error: code = NotFound desc = could not find container \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": container with ID starting with 7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.550094 4792 scope.go:117] "RemoveContainer" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.550291 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"} err="failed to get container status \"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\": rpc error: code = NotFound desc = could not find container \"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\": container with ID starting with c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.550309 4792 scope.go:117] "RemoveContainer" containerID="7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.550444 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08"} err="failed to get container status \"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\": rpc error: code = NotFound desc = could not find container \"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\": container with ID starting with 7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.550462 4792 scope.go:117] "RemoveContainer" containerID="3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.553969 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be"} err="failed to get container status \"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\": rpc error: code = NotFound desc = could not find container \"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\": container with ID starting with 
3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.553993 4792 scope.go:117] "RemoveContainer" containerID="a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.554267 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1"} err="failed to get container status \"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\": rpc error: code = NotFound desc = could not find container \"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\": container with ID starting with a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.554318 4792 scope.go:117] "RemoveContainer" containerID="46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.554593 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc"} err="failed to get container status \"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\": rpc error: code = NotFound desc = could not find container \"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\": container with ID starting with 46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.554620 4792 scope.go:117] "RemoveContainer" containerID="7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.554907 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173"} err="failed to get container status \"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\": rpc error: code = NotFound desc = could not find container \"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\": container with ID starting with 7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.554956 4792 scope.go:117] "RemoveContainer" containerID="7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.555238 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627"} err="failed to get container status \"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\": rpc error: code = NotFound desc = could not find container \"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\": container with ID starting with 7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.555264 4792 scope.go:117] "RemoveContainer" containerID="c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.556287 4792 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925"} err="failed to get container status \"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\": rpc error: code = NotFound desc = could not find container \"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\": container with ID starting with c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.556337 4792 scope.go:117] "RemoveContainer" containerID="d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.557427 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820"} err="failed to get container status \"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\": rpc error: code = NotFound desc = could not find container \"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\": container with ID starting with d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.557458 4792 scope.go:117] "RemoveContainer" containerID="7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.557795 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86"} err="failed to get container status \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": rpc error: code = NotFound desc = could not find container \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": container with ID starting with 7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.557819 4792 scope.go:117] "RemoveContainer" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.558189 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"} err="failed to get container status \"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\": rpc error: code = NotFound desc = could not find container \"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\": container with ID starting with c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.558213 4792 scope.go:117] "RemoveContainer" containerID="7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.558616 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08"} err="failed to get container status \"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\": rpc error: code = NotFound desc = could not find container \"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\": container with ID starting with 7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08 not found: ID does not exist" Sep 
29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.558665 4792 scope.go:117] "RemoveContainer" containerID="3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.558997 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be"} err="failed to get container status \"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\": rpc error: code = NotFound desc = could not find container \"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\": container with ID starting with 3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.559021 4792 scope.go:117] "RemoveContainer" containerID="a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.559332 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1"} err="failed to get container status \"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\": rpc error: code = NotFound desc = could not find container \"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\": container with ID starting with a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.559356 4792 scope.go:117] "RemoveContainer" containerID="46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.560740 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc"} err="failed to get container status \"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\": rpc error: code = NotFound desc = could not find container \"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\": container with ID starting with 46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.560762 4792 scope.go:117] "RemoveContainer" containerID="7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.561131 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173"} err="failed to get container status \"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\": rpc error: code = NotFound desc = could not find container \"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\": container with ID starting with 7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.561161 4792 scope.go:117] "RemoveContainer" containerID="7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.564059 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627"} err="failed to get container status 
\"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\": rpc error: code = NotFound desc = could not find container \"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\": container with ID starting with 7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.564086 4792 scope.go:117] "RemoveContainer" containerID="c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.564279 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925"} err="failed to get container status \"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\": rpc error: code = NotFound desc = could not find container \"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\": container with ID starting with c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.564295 4792 scope.go:117] "RemoveContainer" containerID="d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.564480 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820"} err="failed to get container status \"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\": rpc error: code = NotFound desc = could not find container \"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\": container with ID starting with d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.564506 4792 scope.go:117] "RemoveContainer" containerID="7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.564702 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86"} err="failed to get container status \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": rpc error: code = NotFound desc = could not find container \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": container with ID starting with 7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.564722 4792 scope.go:117] "RemoveContainer" containerID="c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.564927 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386"} err="failed to get container status \"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\": rpc error: code = NotFound desc = could not find container \"c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386\": container with ID starting with c8f292dcc3508a8a9eee89bc5ade8d636ca7f748ad3ac0af62bf8f356dace386 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.564973 4792 scope.go:117] "RemoveContainer" 
containerID="7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.565157 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08"} err="failed to get container status \"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\": rpc error: code = NotFound desc = could not find container \"7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08\": container with ID starting with 7138892e31e3d1949d0ae4789515fc0fd9868469eb14de1464a2f59786b85f08 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.565175 4792 scope.go:117] "RemoveContainer" containerID="3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.565385 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be"} err="failed to get container status \"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\": rpc error: code = NotFound desc = could not find container \"3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be\": container with ID starting with 3408e50d82d1e7f50d9cd4fb2b4e078059bbc4daba10ca93c3cab56d4fe190be not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.565408 4792 scope.go:117] "RemoveContainer" containerID="a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.565619 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1"} err="failed to get container status \"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\": rpc error: code = NotFound desc = could not find container \"a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1\": container with ID starting with a5023531ae972c8f19f5fbf8cdb3c4040f1b63d5d7b9d00e885607f0f84c88a1 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.565641 4792 scope.go:117] "RemoveContainer" containerID="46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.569241 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc"} err="failed to get container status \"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\": rpc error: code = NotFound desc = could not find container \"46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc\": container with ID starting with 46a3cf64e8fd5f5c75be0dd56175bd00e95e2780c73e39558e3b68ca1e6a44bc not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.569329 4792 scope.go:117] "RemoveContainer" containerID="7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.569760 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173"} err="failed to get container status \"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\": rpc error: code = NotFound desc = could not find 
container \"7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173\": container with ID starting with 7b64445ce1e067504326c5005136522f885ba8796579cfb651019d2372a89173 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.569799 4792 scope.go:117] "RemoveContainer" containerID="7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.570420 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627"} err="failed to get container status \"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\": rpc error: code = NotFound desc = could not find container \"7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627\": container with ID starting with 7e9625b3628f291ecaa686da104d719695bd8c46eb46d08f9eccab27a2013627 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.570448 4792 scope.go:117] "RemoveContainer" containerID="c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.570723 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925"} err="failed to get container status \"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\": rpc error: code = NotFound desc = could not find container \"c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925\": container with ID starting with c3a44c0899a9afeaa74bb22565c3f9514603ce1b83f9794539f677d067785925 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.570747 4792 scope.go:117] "RemoveContainer" containerID="d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.570992 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820"} err="failed to get container status \"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\": rpc error: code = NotFound desc = could not find container \"d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820\": container with ID starting with d0516004c2ea4a5711f5e00dcfa01fd5c8d0c0d0d60fd31b0d7da586cd83a820 not found: ID does not exist" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.571012 4792 scope.go:117] "RemoveContainer" containerID="7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86" Sep 29 19:08:12 crc kubenswrapper[4792]: I0929 19:08:12.571333 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86"} err="failed to get container status \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": rpc error: code = NotFound desc = could not find container \"7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86\": container with ID starting with 7ae9d48d57439129f246e25077eb0e832a613136c8e2873a6194e55667617f86 not found: ID does not exist" Sep 29 19:08:13 crc kubenswrapper[4792]: I0929 19:08:13.021429 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="716c5fdd-0e02-4066-9210-93d805b6fe81" path="/var/lib/kubelet/pods/716c5fdd-0e02-4066-9210-93d805b6fe81/volumes" Sep 29 
19:08:13 crc kubenswrapper[4792]: I0929 19:08:13.240352 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" event={"ID":"98a059b7-9711-49f3-8545-b6027d15e657","Type":"ContainerStarted","Data":"ba068506910789ae52f1a1a3460443ecaece50bd37dd68884c13d99ccfd95eb5"} Sep 29 19:08:13 crc kubenswrapper[4792]: I0929 19:08:13.240395 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" event={"ID":"98a059b7-9711-49f3-8545-b6027d15e657","Type":"ContainerStarted","Data":"05fdc60c62c406526690421c487d6818d71743fccd799e97739490568a4fcd4e"} Sep 29 19:08:13 crc kubenswrapper[4792]: I0929 19:08:13.240410 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" event={"ID":"98a059b7-9711-49f3-8545-b6027d15e657","Type":"ContainerStarted","Data":"561f6493cbe8bee88c9c64a07fa658e9bfaa83e5d071e78ab5385ce3929f14d8"} Sep 29 19:08:13 crc kubenswrapper[4792]: I0929 19:08:13.240421 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" event={"ID":"98a059b7-9711-49f3-8545-b6027d15e657","Type":"ContainerStarted","Data":"8b623e627449d9e864a707bb80cb2901b17445a1da8a128bd603b1b30effbe00"} Sep 29 19:08:13 crc kubenswrapper[4792]: I0929 19:08:13.240433 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" event={"ID":"98a059b7-9711-49f3-8545-b6027d15e657","Type":"ContainerStarted","Data":"e18cc167873ea3100c687677f2e5012257df0655ef5f1d551b972b07412b4940"} Sep 29 19:08:13 crc kubenswrapper[4792]: I0929 19:08:13.240445 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" event={"ID":"98a059b7-9711-49f3-8545-b6027d15e657","Type":"ContainerStarted","Data":"389b451997fa021b35c1b38e33a7e6ccbd937f99f4529e4087f06a35e92f31e4"} Sep 29 19:08:15 crc kubenswrapper[4792]: I0929 19:08:15.252131 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" event={"ID":"98a059b7-9711-49f3-8545-b6027d15e657","Type":"ContainerStarted","Data":"e1723ef8b9226837f507a051aa7ec32b82848ed4b399018ede91ab333e4bde6e"} Sep 29 19:08:18 crc kubenswrapper[4792]: I0929 19:08:18.271978 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" event={"ID":"98a059b7-9711-49f3-8545-b6027d15e657","Type":"ContainerStarted","Data":"b1a507042ccb5450c9ba17cfe2df4fa3557af46539ece5fd6d7522a507c4318f"} Sep 29 19:08:18 crc kubenswrapper[4792]: I0929 19:08:18.272757 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:18 crc kubenswrapper[4792]: I0929 19:08:18.272948 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:18 crc kubenswrapper[4792]: I0929 19:08:18.302887 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:18 crc kubenswrapper[4792]: I0929 19:08:18.303140 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:18 crc kubenswrapper[4792]: I0929 19:08:18.306946 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" podStartSLOduration=7.306929251 podStartE2EDuration="7.306929251s" 
podCreationTimestamp="2025-09-29 19:08:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:08:18.302315854 +0000 UTC m=+710.295623260" watchObservedRunningTime="2025-09-29 19:08:18.306929251 +0000 UTC m=+710.300236647" Sep 29 19:08:19 crc kubenswrapper[4792]: I0929 19:08:19.276743 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:27 crc kubenswrapper[4792]: I0929 19:08:27.015461 4792 scope.go:117] "RemoveContainer" containerID="ce5b36817c4429539a535b70ada4c3b33d548c1cffdb995523a6276f42da0607" Sep 29 19:08:27 crc kubenswrapper[4792]: E0929 19:08:27.016307 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5hwvp_openshift-multus(100876d3-2539-47f1-91fa-0f91456ccac1)\"" pod="openshift-multus/multus-5hwvp" podUID="100876d3-2539-47f1-91fa-0f91456ccac1" Sep 29 19:08:41 crc kubenswrapper[4792]: I0929 19:08:41.876667 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bs7kw" Sep 29 19:08:42 crc kubenswrapper[4792]: I0929 19:08:42.016158 4792 scope.go:117] "RemoveContainer" containerID="ce5b36817c4429539a535b70ada4c3b33d548c1cffdb995523a6276f42da0607" Sep 29 19:08:42 crc kubenswrapper[4792]: I0929 19:08:42.414206 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5hwvp_100876d3-2539-47f1-91fa-0f91456ccac1/kube-multus/2.log" Sep 29 19:08:42 crc kubenswrapper[4792]: I0929 19:08:42.414484 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5hwvp" event={"ID":"100876d3-2539-47f1-91fa-0f91456ccac1","Type":"ContainerStarted","Data":"0fc6aa99809024bb7fbc3369b37e63ed5dd001cb11dbfaf7aa0ba3043a2ef3ae"} Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.394088 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"] Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.395641 4792 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.394088 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"]
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.395641 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.399391 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.412945 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"]
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.426423 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eca64cc0-f739-41b8-812c-55536fc117b7-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk\" (UID: \"eca64cc0-f739-41b8-812c-55536fc117b7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.426485 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eca64cc0-f739-41b8-812c-55536fc117b7-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk\" (UID: \"eca64cc0-f739-41b8-812c-55536fc117b7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.426591 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59rk4\" (UniqueName: \"kubernetes.io/projected/eca64cc0-f739-41b8-812c-55536fc117b7-kube-api-access-59rk4\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk\" (UID: \"eca64cc0-f739-41b8-812c-55536fc117b7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.528030 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eca64cc0-f739-41b8-812c-55536fc117b7-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk\" (UID: \"eca64cc0-f739-41b8-812c-55536fc117b7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.528121 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59rk4\" (UniqueName: \"kubernetes.io/projected/eca64cc0-f739-41b8-812c-55536fc117b7-kube-api-access-59rk4\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk\" (UID: \"eca64cc0-f739-41b8-812c-55536fc117b7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.528235 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eca64cc0-f739-41b8-812c-55536fc117b7-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk\" (UID: \"eca64cc0-f739-41b8-812c-55536fc117b7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.528844 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eca64cc0-f739-41b8-812c-55536fc117b7-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk\" (UID: \"eca64cc0-f739-41b8-812c-55536fc117b7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.528897 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eca64cc0-f739-41b8-812c-55536fc117b7-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk\" (UID: \"eca64cc0-f739-41b8-812c-55536fc117b7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.549442 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59rk4\" (UniqueName: \"kubernetes.io/projected/eca64cc0-f739-41b8-812c-55536fc117b7-kube-api-access-59rk4\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk\" (UID: \"eca64cc0-f739-41b8-812c-55536fc117b7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.711004 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
Sep 29 19:08:51 crc kubenswrapper[4792]: I0929 19:08:51.910580 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"]
Sep 29 19:08:52 crc kubenswrapper[4792]: I0929 19:08:52.472603 4792 generic.go:334] "Generic (PLEG): container finished" podID="eca64cc0-f739-41b8-812c-55536fc117b7" containerID="d598a2309e1098c31565df3bf4df23fcf490eabacd8f98af01863055e741a097" exitCode=0
Sep 29 19:08:52 crc kubenswrapper[4792]: I0929 19:08:52.473660 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk" event={"ID":"eca64cc0-f739-41b8-812c-55536fc117b7","Type":"ContainerDied","Data":"d598a2309e1098c31565df3bf4df23fcf490eabacd8f98af01863055e741a097"}
Sep 29 19:08:52 crc kubenswrapper[4792]: I0929 19:08:52.473761 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk" event={"ID":"eca64cc0-f739-41b8-812c-55536fc117b7","Type":"ContainerStarted","Data":"878c3fe4021fa1beaf34405ac90ef575ce67fe7d601dbba3187916cafd118756"}
Sep 29 19:08:54 crc kubenswrapper[4792]: I0929 19:08:54.482163 4792 generic.go:334] "Generic (PLEG): container finished" podID="eca64cc0-f739-41b8-812c-55536fc117b7" containerID="50502b2bf337041c192d0923d78e1930ea007b74c7e4b133728ccfb1e9d6dca3" exitCode=0
Sep 29 19:08:54 crc kubenswrapper[4792]: I0929 19:08:54.482245 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk" event={"ID":"eca64cc0-f739-41b8-812c-55536fc117b7","Type":"ContainerDied","Data":"50502b2bf337041c192d0923d78e1930ea007b74c7e4b133728ccfb1e9d6dca3"}
Sep 29 19:08:55 crc kubenswrapper[4792]: I0929 19:08:55.492305 4792 generic.go:334] "Generic (PLEG): container finished" podID="eca64cc0-f739-41b8-812c-55536fc117b7" containerID="3eb886581a44ffed0c2832dbb77b07f1e0c133c88618c1d67a18e44f8e2f1c74" exitCode=0
Sep 29 19:08:55 crc kubenswrapper[4792]: I0929 19:08:55.492424 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk" event={"ID":"eca64cc0-f739-41b8-812c-55536fc117b7","Type":"ContainerDied","Data":"3eb886581a44ffed0c2832dbb77b07f1e0c133c88618c1d67a18e44f8e2f1c74"}
Sep 29 19:08:56 crc kubenswrapper[4792]: I0929 19:08:56.715767 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk"
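[annotation] The marketplace bundle pod above runs its containers to completion, each ending in a PLEG "container finished" record with exitCode=0. A stdlib-only Go sketch (illustrative, assuming the record shape shown in this log) that flags any non-zero exit codes:

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    	"strconv"
    )

    // Matches PLEG completion records like:
    //   ... "Generic (PLEG): container finished" podID="..." containerID="..." exitCode=0
    var finishedRe = regexp.MustCompile(
    	`"Generic \(PLEG\): container finished" podID="([^"]+)" containerID="([^"]+)" exitCode=(-?\d+)`)

    func main() {
    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
    	exits := map[string]int{} // containerID -> exit code
    	for sc.Scan() {
    		if m := finishedRe.FindStringSubmatch(sc.Text()); m != nil {
    			code, _ := strconv.Atoi(m[3])
    			exits[m[2]] = code
    			if code != 0 {
    				fmt.Printf("pod %s: container %.12s exited %d\n", m[1], m[2], code)
    			}
    		}
    	}
    	fmt.Printf("%d container exits observed\n", len(exits))
    }

For this section it would stay silent on the bundle pod (all three containers exit 0) and only count the clean shutdowns of the controller-manager replicas further down.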
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:56 crc kubenswrapper[4792]: I0929 19:08:56.998746 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eca64cc0-f739-41b8-812c-55536fc117b7-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:56 crc kubenswrapper[4792]: I0929 19:08:56.998790 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59rk4\" (UniqueName: \"kubernetes.io/projected/eca64cc0-f739-41b8-812c-55536fc117b7-kube-api-access-59rk4\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:56 crc kubenswrapper[4792]: I0929 19:08:56.998801 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eca64cc0-f739-41b8-812c-55536fc117b7-util\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:57 crc kubenswrapper[4792]: I0929 19:08:57.505824 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk" event={"ID":"eca64cc0-f739-41b8-812c-55536fc117b7","Type":"ContainerDied","Data":"878c3fe4021fa1beaf34405ac90ef575ce67fe7d601dbba3187916cafd118756"} Sep 29 19:08:57 crc kubenswrapper[4792]: I0929 19:08:57.505892 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="878c3fe4021fa1beaf34405ac90ef575ce67fe7d601dbba3187916cafd118756" Sep 29 19:08:57 crc kubenswrapper[4792]: I0929 19:08:57.505951 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk" Sep 29 19:08:57 crc kubenswrapper[4792]: I0929 19:08:57.896181 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p9pds"] Sep 29 19:08:57 crc kubenswrapper[4792]: I0929 19:08:57.896645 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" podUID="3a5115bb-23d8-4ff0-9c56-419450cd87fe" containerName="controller-manager" containerID="cri-o://e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d" gracePeriod=30 Sep 29 19:08:57 crc kubenswrapper[4792]: I0929 19:08:57.996475 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7"] Sep 29 19:08:57 crc kubenswrapper[4792]: I0929 19:08:57.996712 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" podUID="53064b9c-6401-4332-b64a-b8cbc84ae37c" containerName="route-controller-manager" containerID="cri-o://e31089b68510000b294ad308d14550309bb7bb3fb7f4e4fd50a23ce0abaaa354" gracePeriod=30 Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.460653 4792 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-l44c7 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.460985 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" podUID="53064b9c-6401-4332-b64a-b8cbc84ae37c" containerName="route-controller-manager" probeResult="failure" output="Get 
\"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.485168 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.514225 4792 generic.go:334] "Generic (PLEG): container finished" podID="53064b9c-6401-4332-b64a-b8cbc84ae37c" containerID="e31089b68510000b294ad308d14550309bb7bb3fb7f4e4fd50a23ce0abaaa354" exitCode=0 Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.514338 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" event={"ID":"53064b9c-6401-4332-b64a-b8cbc84ae37c","Type":"ContainerDied","Data":"e31089b68510000b294ad308d14550309bb7bb3fb7f4e4fd50a23ce0abaaa354"} Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.515961 4792 generic.go:334] "Generic (PLEG): container finished" podID="3a5115bb-23d8-4ff0-9c56-419450cd87fe" containerID="e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d" exitCode=0 Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.515993 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" event={"ID":"3a5115bb-23d8-4ff0-9c56-419450cd87fe","Type":"ContainerDied","Data":"e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d"} Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.516009 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.516027 4792 scope.go:117] "RemoveContainer" containerID="e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.516014 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" event={"ID":"3a5115bb-23d8-4ff0-9c56-419450cd87fe","Type":"ContainerDied","Data":"01e0c786da2dd100626d7cf7cee6af30949c03fbc18fac6427b3e3bff5fb630d"} Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.539008 4792 scope.go:117] "RemoveContainer" containerID="e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d" Sep 29 19:08:58 crc kubenswrapper[4792]: E0929 19:08:58.539421 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d\": container with ID starting with e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d not found: ID does not exist" containerID="e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.539470 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d"} err="failed to get container status \"e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d\": rpc error: code = NotFound desc = could not find container \"e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d\": container with ID starting with e3700f1867c66f68f7b1117cca4480ba4f2a90f644403b4f33cf5ab79858811d not found: ID does not exist" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.573447 4792 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9"] Sep 29 19:08:58 crc kubenswrapper[4792]: E0929 19:08:58.573654 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca64cc0-f739-41b8-812c-55536fc117b7" containerName="util" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.573666 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca64cc0-f739-41b8-812c-55536fc117b7" containerName="util" Sep 29 19:08:58 crc kubenswrapper[4792]: E0929 19:08:58.573680 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a5115bb-23d8-4ff0-9c56-419450cd87fe" containerName="controller-manager" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.573686 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a5115bb-23d8-4ff0-9c56-419450cd87fe" containerName="controller-manager" Sep 29 19:08:58 crc kubenswrapper[4792]: E0929 19:08:58.573694 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca64cc0-f739-41b8-812c-55536fc117b7" containerName="extract" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.573700 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca64cc0-f739-41b8-812c-55536fc117b7" containerName="extract" Sep 29 19:08:58 crc kubenswrapper[4792]: E0929 19:08:58.573716 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca64cc0-f739-41b8-812c-55536fc117b7" containerName="pull" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.573722 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca64cc0-f739-41b8-812c-55536fc117b7" containerName="pull" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.573806 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a5115bb-23d8-4ff0-9c56-419450cd87fe" containerName="controller-manager" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.573823 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="eca64cc0-f739-41b8-812c-55536fc117b7" containerName="extract" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.574189 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.578209 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.578370 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.588307 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9"] Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.629004 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgwqq\" (UniqueName: \"kubernetes.io/projected/3a5115bb-23d8-4ff0-9c56-419450cd87fe-kube-api-access-wgwqq\") pod \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.629053 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a5115bb-23d8-4ff0-9c56-419450cd87fe-serving-cert\") pod \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.629137 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-client-ca\") pod \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.629183 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-config\") pod \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.629541 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-proxy-ca-bundles\") pod \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\" (UID: \"3a5115bb-23d8-4ff0-9c56-419450cd87fe\") " Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.629664 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgzvk\" (UniqueName: \"kubernetes.io/projected/6a491845-f4ef-4f82-b716-d46be2982350-kube-api-access-wgzvk\") pod \"nmstate-operator-5d6f6cfd66-qr5w9\" (UID: \"6a491845-f4ef-4f82-b716-d46be2982350\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.630028 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-config" (OuterVolumeSpecName: "config") pod "3a5115bb-23d8-4ff0-9c56-419450cd87fe" (UID: "3a5115bb-23d8-4ff0-9c56-419450cd87fe"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.630271 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3a5115bb-23d8-4ff0-9c56-419450cd87fe" (UID: "3a5115bb-23d8-4ff0-9c56-419450cd87fe"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.630987 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-client-ca" (OuterVolumeSpecName: "client-ca") pod "3a5115bb-23d8-4ff0-9c56-419450cd87fe" (UID: "3a5115bb-23d8-4ff0-9c56-419450cd87fe"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.637128 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a5115bb-23d8-4ff0-9c56-419450cd87fe-kube-api-access-wgwqq" (OuterVolumeSpecName: "kube-api-access-wgwqq") pod "3a5115bb-23d8-4ff0-9c56-419450cd87fe" (UID: "3a5115bb-23d8-4ff0-9c56-419450cd87fe"). InnerVolumeSpecName "kube-api-access-wgwqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.645669 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a5115bb-23d8-4ff0-9c56-419450cd87fe-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3a5115bb-23d8-4ff0-9c56-419450cd87fe" (UID: "3a5115bb-23d8-4ff0-9c56-419450cd87fe"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.657790 4792 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.730191 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53064b9c-6401-4332-b64a-b8cbc84ae37c-serving-cert\") pod \"53064b9c-6401-4332-b64a-b8cbc84ae37c\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") "
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.730237 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-config\") pod \"53064b9c-6401-4332-b64a-b8cbc84ae37c\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") "
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.730259 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-client-ca\") pod \"53064b9c-6401-4332-b64a-b8cbc84ae37c\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") "
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.730404 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgzvk\" (UniqueName: \"kubernetes.io/projected/6a491845-f4ef-4f82-b716-d46be2982350-kube-api-access-wgzvk\") pod \"nmstate-operator-5d6f6cfd66-qr5w9\" (UID: \"6a491845-f4ef-4f82-b716-d46be2982350\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9"
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.730462 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.730473 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgwqq\" (UniqueName: \"kubernetes.io/projected/3a5115bb-23d8-4ff0-9c56-419450cd87fe-kube-api-access-wgwqq\") on node \"crc\" DevicePath \"\""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.730483 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a5115bb-23d8-4ff0-9c56-419450cd87fe-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.730491 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-client-ca\") on node \"crc\" DevicePath \"\""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.730500 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5115bb-23d8-4ff0-9c56-419450cd87fe-config\") on node \"crc\" DevicePath \"\""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.731117 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-client-ca" (OuterVolumeSpecName: "client-ca") pod "53064b9c-6401-4332-b64a-b8cbc84ae37c" (UID: "53064b9c-6401-4332-b64a-b8cbc84ae37c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.731151 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-config" (OuterVolumeSpecName: "config") pod "53064b9c-6401-4332-b64a-b8cbc84ae37c" (UID: "53064b9c-6401-4332-b64a-b8cbc84ae37c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.733072 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53064b9c-6401-4332-b64a-b8cbc84ae37c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "53064b9c-6401-4332-b64a-b8cbc84ae37c" (UID: "53064b9c-6401-4332-b64a-b8cbc84ae37c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.751610 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgzvk\" (UniqueName: \"kubernetes.io/projected/6a491845-f4ef-4f82-b716-d46be2982350-kube-api-access-wgzvk\") pod \"nmstate-operator-5d6f6cfd66-qr5w9\" (UID: \"6a491845-f4ef-4f82-b716-d46be2982350\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9"
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.830968 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bktj\" (UniqueName: \"kubernetes.io/projected/53064b9c-6401-4332-b64a-b8cbc84ae37c-kube-api-access-4bktj\") pod \"53064b9c-6401-4332-b64a-b8cbc84ae37c\" (UID: \"53064b9c-6401-4332-b64a-b8cbc84ae37c\") "
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.831201 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-client-ca\") on node \"crc\" DevicePath \"\""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.831215 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53064b9c-6401-4332-b64a-b8cbc84ae37c-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.831227 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53064b9c-6401-4332-b64a-b8cbc84ae37c-config\") on node \"crc\" DevicePath \"\""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.833503 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53064b9c-6401-4332-b64a-b8cbc84ae37c-kube-api-access-4bktj" (OuterVolumeSpecName: "kube-api-access-4bktj") pod "53064b9c-6401-4332-b64a-b8cbc84ae37c" (UID: "53064b9c-6401-4332-b64a-b8cbc84ae37c"). InnerVolumeSpecName "kube-api-access-4bktj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.842921 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p9pds"]
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.846075 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p9pds"]
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.896349 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9"
Sep 29 19:08:58 crc kubenswrapper[4792]: I0929 19:08:58.932714 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bktj\" (UniqueName: \"kubernetes.io/projected/53064b9c-6401-4332-b64a-b8cbc84ae37c-kube-api-access-4bktj\") on node \"crc\" DevicePath \"\""
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.022718 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a5115bb-23d8-4ff0-9c56-419450cd87fe" path="/var/lib/kubelet/pods/3a5115bb-23d8-4ff0-9c56-419450cd87fe/volumes"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.106554 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9"]
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.137773 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"]
Sep 29 19:08:59 crc kubenswrapper[4792]: E0929 19:08:59.138033 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53064b9c-6401-4332-b64a-b8cbc84ae37c" containerName="route-controller-manager"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.138055 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="53064b9c-6401-4332-b64a-b8cbc84ae37c" containerName="route-controller-manager"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.138179 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="53064b9c-6401-4332-b64a-b8cbc84ae37c" containerName="route-controller-manager"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.138629 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.147395 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"]
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.237390 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-serving-cert\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.237435 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64jk2\" (UniqueName: \"kubernetes.io/projected/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-kube-api-access-64jk2\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.237669 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-client-ca\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.237740 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-config\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.338735 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-client-ca\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.339572 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-client-ca\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.339952 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-config\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.340753 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-config\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.341094 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-serving-cert\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.341229 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64jk2\" (UniqueName: \"kubernetes.io/projected/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-kube-api-access-64jk2\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.347696 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-serving-cert\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.367350 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64jk2\" (UniqueName: \"kubernetes.io/projected/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-kube-api-access-64jk2\") pod \"route-controller-manager-7ccddcb4c6-7grs4\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") " pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.451287 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.479299 4792 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-p9pds container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.479786 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-p9pds" podUID="3a5115bb-23d8-4ff0-9c56-419450cd87fe" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.527863 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7" event={"ID":"53064b9c-6401-4332-b64a-b8cbc84ae37c","Type":"ContainerDied","Data":"07730ba3d41660f554a2e8df96306b54d02a57b919e76f6fd01652dcc057e533"}
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.528172 4792 scope.go:117] "RemoveContainer" containerID="e31089b68510000b294ad308d14550309bb7bb3fb7f4e4fd50a23ce0abaaa354"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.527919 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7"
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.530471 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9" event={"ID":"6a491845-f4ef-4f82-b716-d46be2982350","Type":"ContainerStarted","Data":"2f95afbbe51459340195fdb2b0c1ff35d83d164040ffddabc288e5c17d873952"}
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.556034 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7"]
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.565227 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-l44c7"]
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.701514 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"]
Sep 29 19:08:59 crc kubenswrapper[4792]: I0929 19:08:59.727079 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"]
Sep 29 19:08:59 crc kubenswrapper[4792]: W0929 19:08:59.735335 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode174dbcf_eda1_4bd6_8b77_0f91d12e3e41.slice/crio-a1e681934f547cfce6a39ae32334ac32041531dadf7c5bc1023a3e0d504cf617 WatchSource:0}: Error finding container a1e681934f547cfce6a39ae32334ac32041531dadf7c5bc1023a3e0d504cf617: Status 404 returned error can't find the container with id a1e681934f547cfce6a39ae32334ac32041531dadf7c5bc1023a3e0d504cf617
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.129315 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6f6669768-4wpl8"]
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.130311 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.133603 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.142537 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.142627 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.142746 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.142912 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.143098 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.145088 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.145890 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6f6669768-4wpl8"]
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.252045 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbfc8688-3dca-4ef3-82bc-bca914276d1c-config\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.252101 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbfc8688-3dca-4ef3-82bc-bca914276d1c-serving-cert\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.252128 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cbfc8688-3dca-4ef3-82bc-bca914276d1c-client-ca\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.252177 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cbfc8688-3dca-4ef3-82bc-bca914276d1c-proxy-ca-bundles\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.252258 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twcrv\" (UniqueName: \"kubernetes.io/projected/cbfc8688-3dca-4ef3-82bc-bca914276d1c-kube-api-access-twcrv\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.353239 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbfc8688-3dca-4ef3-82bc-bca914276d1c-config\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.353285 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbfc8688-3dca-4ef3-82bc-bca914276d1c-serving-cert\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.353306 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cbfc8688-3dca-4ef3-82bc-bca914276d1c-client-ca\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.353351 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cbfc8688-3dca-4ef3-82bc-bca914276d1c-proxy-ca-bundles\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.353375 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twcrv\" (UniqueName: \"kubernetes.io/projected/cbfc8688-3dca-4ef3-82bc-bca914276d1c-kube-api-access-twcrv\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.354636 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cbfc8688-3dca-4ef3-82bc-bca914276d1c-client-ca\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.354928 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbfc8688-3dca-4ef3-82bc-bca914276d1c-config\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.355616 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cbfc8688-3dca-4ef3-82bc-bca914276d1c-proxy-ca-bundles\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.360181 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbfc8688-3dca-4ef3-82bc-bca914276d1c-serving-cert\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.397504 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twcrv\" (UniqueName: \"kubernetes.io/projected/cbfc8688-3dca-4ef3-82bc-bca914276d1c-kube-api-access-twcrv\") pod \"controller-manager-6f6669768-4wpl8\" (UID: \"cbfc8688-3dca-4ef3-82bc-bca914276d1c\") " pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.446284 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.547267 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4" event={"ID":"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41","Type":"ContainerStarted","Data":"03401129a7b0b1ffd94aa07f8e244f943742edca57365564fe54787f3dc3337a"}
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.547636 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4" event={"ID":"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41","Type":"ContainerStarted","Data":"a1e681934f547cfce6a39ae32334ac32041531dadf7c5bc1023a3e0d504cf617"}
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.547757 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4" podUID="e174dbcf-eda1-4bd6-8b77-0f91d12e3e41" containerName="route-controller-manager" containerID="cri-o://03401129a7b0b1ffd94aa07f8e244f943742edca57365564fe54787f3dc3337a" gracePeriod=30
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.549331 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.552669 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.575112 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4" podStartSLOduration=2.57509474 podStartE2EDuration="2.57509474s" podCreationTimestamp="2025-09-29 19:08:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:00.572073887 +0000 UTC m=+752.565381293" watchObservedRunningTime="2025-09-29 19:09:00.57509474 +0000 UTC m=+752.568402126"
Sep 29 19:09:00 crc kubenswrapper[4792]: I0929 19:09:00.696477 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6f6669768-4wpl8"]
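[annotation] The record at 19:09:00.547757 above kills the just-started route-controller-manager container with a 30-second grace period; the matching ContainerDied arrives about a second later (19:09:01.554213 below), well inside the grace window. A stdlib-only Go sketch (illustrative, assuming the klog timestamp and record shapes in this log) that measures kill-to-died latency per container:

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    	"strconv"
    	"time"
    )

    var (
    	klogTimeRe = regexp.MustCompile(`[IWE]\d{4} (\d{2}:\d{2}:\d{2}\.\d{6})`)
    	killRe     = regexp.MustCompile(`"Killing container with a grace period".*containerID="cri-o://([0-9a-f]+)" gracePeriod=(\d+)`)
    	diedRe     = regexp.MustCompile(`"ContainerDied","Data":"([0-9a-f]+)"`)
    )

    type kill struct {
    	at    time.Time
    	grace int
    }

    func main() {
    	kills := map[string]kill{} // containerID -> kill time and grace period
    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
    	for sc.Scan() {
    		line := sc.Text()
    		tm := klogTimeRe.FindStringSubmatch(line)
    		if tm == nil {
    			continue
    		}
    		at, _ := time.Parse("15:04:05.000000", tm[1])
    		if m := killRe.FindStringSubmatch(line); m != nil {
    			g, _ := strconv.Atoi(m[2])
    			kills[m[1]] = kill{at: at, grace: g}
    		} else if m := diedRe.FindStringSubmatch(line); m != nil {
    			if k, ok := kills[m[1]]; ok {
    				fmt.Printf("%.12s died %v after kill (grace %ds)\n", m[1], at.Sub(k.at), k.grace)
    				delete(kills, m[1])
    			}
    		}
    	}
    }

For the 03401129... container this would report roughly 1.006s against the 30s grace period.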
Sep 29 19:09:00 crc kubenswrapper[4792]: W0929 19:09:00.703146 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcbfc8688_3dca_4ef3_82bc_bca914276d1c.slice/crio-a9976d9f7b85c93f4fb999abe7496bc479de8a0f275960759d578fb78392f63a WatchSource:0}: Error finding container a9976d9f7b85c93f4fb999abe7496bc479de8a0f275960759d578fb78392f63a: Status 404 returned error can't find the container with id a9976d9f7b85c93f4fb999abe7496bc479de8a0f275960759d578fb78392f63a
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.023264 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53064b9c-6401-4332-b64a-b8cbc84ae37c" path="/var/lib/kubelet/pods/53064b9c-6401-4332-b64a-b8cbc84ae37c/volumes"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.554213 4792 generic.go:334] "Generic (PLEG): container finished" podID="e174dbcf-eda1-4bd6-8b77-0f91d12e3e41" containerID="03401129a7b0b1ffd94aa07f8e244f943742edca57365564fe54787f3dc3337a" exitCode=0
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.554795 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4" event={"ID":"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41","Type":"ContainerDied","Data":"03401129a7b0b1ffd94aa07f8e244f943742edca57365564fe54787f3dc3337a"}
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.554826 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4" event={"ID":"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41","Type":"ContainerDied","Data":"a1e681934f547cfce6a39ae32334ac32041531dadf7c5bc1023a3e0d504cf617"}
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.554840 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1e681934f547cfce6a39ae32334ac32041531dadf7c5bc1023a3e0d504cf617"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.562901 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8" event={"ID":"cbfc8688-3dca-4ef3-82bc-bca914276d1c","Type":"ContainerStarted","Data":"6d1fdc1c19fd403c07eed8caa6fd1e6e72a65eb9507fce32e10225c6ce795bd5"}
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.562941 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8" event={"ID":"cbfc8688-3dca-4ef3-82bc-bca914276d1c","Type":"ContainerStarted","Data":"a9976d9f7b85c93f4fb999abe7496bc479de8a0f275960759d578fb78392f63a"}
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.563175 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.573100 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.578548 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.610627 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6f6669768-4wpl8" podStartSLOduration=3.6106092480000003 podStartE2EDuration="3.610609248s" podCreationTimestamp="2025-09-29 19:08:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:01.588212065 +0000 UTC m=+753.581519461" watchObservedRunningTime="2025-09-29 19:09:01.610609248 +0000 UTC m=+753.603916644"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.622759 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"]
Sep 29 19:09:01 crc kubenswrapper[4792]: E0929 19:09:01.623007 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e174dbcf-eda1-4bd6-8b77-0f91d12e3e41" containerName="route-controller-manager"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.623024 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e174dbcf-eda1-4bd6-8b77-0f91d12e3e41" containerName="route-controller-manager"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.623114 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e174dbcf-eda1-4bd6-8b77-0f91d12e3e41" containerName="route-controller-manager"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.623460 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.642043 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"]
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.775391 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-config\") pod \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") "
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.775454 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-serving-cert\") pod \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") "
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.775493 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64jk2\" (UniqueName: \"kubernetes.io/projected/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-kube-api-access-64jk2\") pod \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") "
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.775549 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-client-ca\") pod \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\" (UID: \"e174dbcf-eda1-4bd6-8b77-0f91d12e3e41\") "
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.775701 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ade21d99-04df-430e-afa6-dc28ad305b89-client-ca\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.775724 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ade21d99-04df-430e-afa6-dc28ad305b89-serving-cert\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.775752 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm7qd\" (UniqueName: \"kubernetes.io/projected/ade21d99-04df-430e-afa6-dc28ad305b89-kube-api-access-sm7qd\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.775774 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ade21d99-04df-430e-afa6-dc28ad305b89-config\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.776225 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-config" (OuterVolumeSpecName: "config") pod "e174dbcf-eda1-4bd6-8b77-0f91d12e3e41" (UID: "e174dbcf-eda1-4bd6-8b77-0f91d12e3e41"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.776741 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-client-ca" (OuterVolumeSpecName: "client-ca") pod "e174dbcf-eda1-4bd6-8b77-0f91d12e3e41" (UID: "e174dbcf-eda1-4bd6-8b77-0f91d12e3e41"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.804041 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e174dbcf-eda1-4bd6-8b77-0f91d12e3e41" (UID: "e174dbcf-eda1-4bd6-8b77-0f91d12e3e41"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.805770 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-kube-api-access-64jk2" (OuterVolumeSpecName: "kube-api-access-64jk2") pod "e174dbcf-eda1-4bd6-8b77-0f91d12e3e41" (UID: "e174dbcf-eda1-4bd6-8b77-0f91d12e3e41"). InnerVolumeSpecName "kube-api-access-64jk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.876682 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ade21d99-04df-430e-afa6-dc28ad305b89-client-ca\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.876729 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ade21d99-04df-430e-afa6-dc28ad305b89-serving-cert\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.876765 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm7qd\" (UniqueName: \"kubernetes.io/projected/ade21d99-04df-430e-afa6-dc28ad305b89-kube-api-access-sm7qd\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.876787 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ade21d99-04df-430e-afa6-dc28ad305b89-config\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.876869 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-config\") on node \"crc\" DevicePath \"\""
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.876880 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.876889 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64jk2\" (UniqueName: \"kubernetes.io/projected/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-kube-api-access-64jk2\") on node \"crc\" DevicePath \"\""
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.876900 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41-client-ca\") on node \"crc\" DevicePath \"\""
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.877731 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ade21d99-04df-430e-afa6-dc28ad305b89-client-ca\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"
Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.878159 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ade21d99-04df-430e-afa6-dc28ad305b89-config\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID:
\"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l" Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.881745 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ade21d99-04df-430e-afa6-dc28ad305b89-serving-cert\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l" Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.895705 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm7qd\" (UniqueName: \"kubernetes.io/projected/ade21d99-04df-430e-afa6-dc28ad305b89-kube-api-access-sm7qd\") pod \"route-controller-manager-d5b77d6f6-qqq6l\" (UID: \"ade21d99-04df-430e-afa6-dc28ad305b89\") " pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l" Sep 29 19:09:01 crc kubenswrapper[4792]: I0929 19:09:01.952634 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l" Sep 29 19:09:02 crc kubenswrapper[4792]: I0929 19:09:02.240475 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l"] Sep 29 19:09:02 crc kubenswrapper[4792]: W0929 19:09:02.275129 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podade21d99_04df_430e_afa6_dc28ad305b89.slice/crio-10a7295d1b4a5c2fe052ce44911cc6020baecbdfbca105e25d4a0de632123f74 WatchSource:0}: Error finding container 10a7295d1b4a5c2fe052ce44911cc6020baecbdfbca105e25d4a0de632123f74: Status 404 returned error can't find the container with id 10a7295d1b4a5c2fe052ce44911cc6020baecbdfbca105e25d4a0de632123f74 Sep 29 19:09:02 crc kubenswrapper[4792]: I0929 19:09:02.570399 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l" event={"ID":"ade21d99-04df-430e-afa6-dc28ad305b89","Type":"ContainerStarted","Data":"10a7295d1b4a5c2fe052ce44911cc6020baecbdfbca105e25d4a0de632123f74"} Sep 29 19:09:02 crc kubenswrapper[4792]: I0929 19:09:02.570625 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4" Sep 29 19:09:02 crc kubenswrapper[4792]: I0929 19:09:02.597199 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"] Sep 29 19:09:02 crc kubenswrapper[4792]: I0929 19:09:02.603596 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ccddcb4c6-7grs4"] Sep 29 19:09:03 crc kubenswrapper[4792]: I0929 19:09:03.022744 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e174dbcf-eda1-4bd6-8b77-0f91d12e3e41" path="/var/lib/kubelet/pods/e174dbcf-eda1-4bd6-8b77-0f91d12e3e41/volumes" Sep 29 19:09:03 crc kubenswrapper[4792]: I0929 19:09:03.577963 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l" event={"ID":"ade21d99-04df-430e-afa6-dc28ad305b89","Type":"ContainerStarted","Data":"ec93fe588a33b2e44268a809cc85d891813975875885c85814c8a456e4a7f8ed"} Sep 29 19:09:03 crc kubenswrapper[4792]: I0929 19:09:03.596276 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l" podStartSLOduration=4.596253428 podStartE2EDuration="4.596253428s" podCreationTimestamp="2025-09-29 19:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:03.59191521 +0000 UTC m=+755.585222626" watchObservedRunningTime="2025-09-29 19:09:03.596253428 +0000 UTC m=+755.589560824" Sep 29 19:09:04 crc kubenswrapper[4792]: I0929 19:09:04.584159 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9" event={"ID":"6a491845-f4ef-4f82-b716-d46be2982350","Type":"ContainerStarted","Data":"f7f334c232b06b310134116c20acb30b4b02ba934aa5247e5a53ef1fe7d0f72e"} Sep 29 19:09:04 crc kubenswrapper[4792]: I0929 19:09:04.584453 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l" Sep 29 19:09:04 crc kubenswrapper[4792]: I0929 19:09:04.589298 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-d5b77d6f6-qqq6l" Sep 29 19:09:04 crc kubenswrapper[4792]: I0929 19:09:04.630287 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qr5w9" podStartSLOduration=2.033411713 podStartE2EDuration="6.630270225s" podCreationTimestamp="2025-09-29 19:08:58 +0000 UTC" firstStartedPulling="2025-09-29 19:08:59.129462052 +0000 UTC m=+751.122769448" lastFinishedPulling="2025-09-29 19:09:03.726320574 +0000 UTC m=+755.719627960" observedRunningTime="2025-09-29 19:09:04.607313447 +0000 UTC m=+756.600620843" watchObservedRunningTime="2025-09-29 19:09:04.630270225 +0000 UTC m=+756.623577621" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.494305 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl"] Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.495363 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.500946 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-k5sf9" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.514581 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl"] Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.528313 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swwq8\" (UniqueName: \"kubernetes.io/projected/a1f9e458-fc58-4f84-89fc-9196c747d6ba-kube-api-access-swwq8\") pod \"nmstate-metrics-58fcddf996-kjrzl\" (UID: \"a1f9e458-fc58-4f84-89fc-9196c747d6ba\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.536886 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx"] Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.537731 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.550311 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.565411 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx"] Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.568496 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-w7wwz"] Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.569112 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.629305 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgf6k\" (UniqueName: \"kubernetes.io/projected/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-kube-api-access-tgf6k\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.629343 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-dbus-socket\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.629386 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swwq8\" (UniqueName: \"kubernetes.io/projected/a1f9e458-fc58-4f84-89fc-9196c747d6ba-kube-api-access-swwq8\") pod \"nmstate-metrics-58fcddf996-kjrzl\" (UID: \"a1f9e458-fc58-4f84-89fc-9196c747d6ba\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.629425 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-nmstate-lock\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.629456 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-ovs-socket\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.665958 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swwq8\" (UniqueName: \"kubernetes.io/projected/a1f9e458-fc58-4f84-89fc-9196c747d6ba-kube-api-access-swwq8\") pod \"nmstate-metrics-58fcddf996-kjrzl\" (UID: \"a1f9e458-fc58-4f84-89fc-9196c747d6ba\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.685956 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp"] Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.686653 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.691177 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.691429 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-44zxz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.693451 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.736530 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-nmstate-lock\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.736615 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/40d01e5b-1274-48a1-8510-4386dd7150bb-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-ps4tp\" (UID: \"40d01e5b-1274-48a1-8510-4386dd7150bb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.736663 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-ovs-socket\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.736706 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-ovs-socket\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.736824 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgf6k\" (UniqueName: \"kubernetes.io/projected/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-kube-api-access-tgf6k\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.736942 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-dbus-socket\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.736987 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hxvx\" (UniqueName: \"kubernetes.io/projected/40d01e5b-1274-48a1-8510-4386dd7150bb-kube-api-access-2hxvx\") pod \"nmstate-console-plugin-864bb6dfb5-ps4tp\" (UID: \"40d01e5b-1274-48a1-8510-4386dd7150bb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.737048 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/40d01e5b-1274-48a1-8510-4386dd7150bb-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-ps4tp\" (UID: \"40d01e5b-1274-48a1-8510-4386dd7150bb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.737069 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-nmstate-lock\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.737094 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvl98\" (UniqueName: \"kubernetes.io/projected/66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef-kube-api-access-jvl98\") pod \"nmstate-webhook-6d689559c5-9gcgx\" (UID: \"66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.737198 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-9gcgx\" (UID: \"66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.737668 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-dbus-socket\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.739899 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp"] Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.779111 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgf6k\" (UniqueName: \"kubernetes.io/projected/7d8ab4ec-b506-4549-be62-9b914b9cb3f3-kube-api-access-tgf6k\") pod \"nmstate-handler-w7wwz\" (UID: \"7d8ab4ec-b506-4549-be62-9b914b9cb3f3\") " pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.810502 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.837790 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hxvx\" (UniqueName: \"kubernetes.io/projected/40d01e5b-1274-48a1-8510-4386dd7150bb-kube-api-access-2hxvx\") pod \"nmstate-console-plugin-864bb6dfb5-ps4tp\" (UID: \"40d01e5b-1274-48a1-8510-4386dd7150bb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.837977 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/40d01e5b-1274-48a1-8510-4386dd7150bb-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-ps4tp\" (UID: \"40d01e5b-1274-48a1-8510-4386dd7150bb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.838000 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvl98\" (UniqueName: \"kubernetes.io/projected/66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef-kube-api-access-jvl98\") pod \"nmstate-webhook-6d689559c5-9gcgx\" (UID: \"66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.838971 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/40d01e5b-1274-48a1-8510-4386dd7150bb-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-ps4tp\" (UID: \"40d01e5b-1274-48a1-8510-4386dd7150bb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.839037 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-9gcgx\" (UID: \"66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" Sep 29 19:09:05 crc kubenswrapper[4792]: E0929 19:09:05.839222 4792 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Sep 29 19:09:05 crc kubenswrapper[4792]: E0929 19:09:05.839289 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/40d01e5b-1274-48a1-8510-4386dd7150bb-plugin-serving-cert podName:40d01e5b-1274-48a1-8510-4386dd7150bb nodeName:}" failed. No retries permitted until 2025-09-29 19:09:06.339270565 +0000 UTC m=+758.332577961 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/40d01e5b-1274-48a1-8510-4386dd7150bb-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-ps4tp" (UID: "40d01e5b-1274-48a1-8510-4386dd7150bb") : secret "plugin-serving-cert" not found Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.839413 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/40d01e5b-1274-48a1-8510-4386dd7150bb-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-ps4tp\" (UID: \"40d01e5b-1274-48a1-8510-4386dd7150bb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.842525 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-9gcgx\" (UID: \"66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.857061 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvl98\" (UniqueName: \"kubernetes.io/projected/66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef-kube-api-access-jvl98\") pod \"nmstate-webhook-6d689559c5-9gcgx\" (UID: \"66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.859081 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hxvx\" (UniqueName: \"kubernetes.io/projected/40d01e5b-1274-48a1-8510-4386dd7150bb-kube-api-access-2hxvx\") pod \"nmstate-console-plugin-864bb6dfb5-ps4tp\" (UID: \"40d01e5b-1274-48a1-8510-4386dd7150bb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.883482 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-w7wwz" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.901642 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-c6d85c744-pz6sl"] Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.902524 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.933305 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-c6d85c744-pz6sl"] Sep 29 19:09:05 crc kubenswrapper[4792]: W0929 19:09:05.938864 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d8ab4ec_b506_4549_be62_9b914b9cb3f3.slice/crio-0a215f482e364e7ce38c00c0113cf517f777cadc6764e0cf3acb14e699036b61 WatchSource:0}: Error finding container 0a215f482e364e7ce38c00c0113cf517f777cadc6764e0cf3acb14e699036b61: Status 404 returned error can't find the container with id 0a215f482e364e7ce38c00c0113cf517f777cadc6764e0cf3acb14e699036b61 Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.940584 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0784279a-cf17-4a52-a404-bcdf82e83b4c-console-serving-cert\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.940652 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-oauth-serving-cert\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.940693 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-console-config\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.940716 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-service-ca\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.940743 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0784279a-cf17-4a52-a404-bcdf82e83b4c-console-oauth-config\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.940782 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-trusted-ca-bundle\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:05 crc kubenswrapper[4792]: I0929 19:09:05.940830 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b828m\" (UniqueName: \"kubernetes.io/projected/0784279a-cf17-4a52-a404-bcdf82e83b4c-kube-api-access-b828m\") pod 
\"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.041718 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b828m\" (UniqueName: \"kubernetes.io/projected/0784279a-cf17-4a52-a404-bcdf82e83b4c-kube-api-access-b828m\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.042099 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0784279a-cf17-4a52-a404-bcdf82e83b4c-console-serving-cert\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.042148 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-oauth-serving-cert\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.042176 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-console-config\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.042213 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-service-ca\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.042288 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0784279a-cf17-4a52-a404-bcdf82e83b4c-console-oauth-config\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.042321 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-trusted-ca-bundle\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.043327 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-console-config\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.043557 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-trusted-ca-bundle\") pod \"console-c6d85c744-pz6sl\" (UID: 
\"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.043615 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-service-ca\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.043670 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0784279a-cf17-4a52-a404-bcdf82e83b4c-oauth-serving-cert\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.050372 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0784279a-cf17-4a52-a404-bcdf82e83b4c-console-serving-cert\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.055475 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0784279a-cf17-4a52-a404-bcdf82e83b4c-console-oauth-config\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.059623 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b828m\" (UniqueName: \"kubernetes.io/projected/0784279a-cf17-4a52-a404-bcdf82e83b4c-kube-api-access-b828m\") pod \"console-c6d85c744-pz6sl\" (UID: \"0784279a-cf17-4a52-a404-bcdf82e83b4c\") " pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.157456 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.218968 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-c6d85c744-pz6sl" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.308393 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl"] Sep 29 19:09:06 crc kubenswrapper[4792]: W0929 19:09:06.318094 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1f9e458_fc58_4f84_89fc_9196c747d6ba.slice/crio-084f3895a5900142abb311b13d31433aafb105fcf2916f58971736185e6bc5e9 WatchSource:0}: Error finding container 084f3895a5900142abb311b13d31433aafb105fcf2916f58971736185e6bc5e9: Status 404 returned error can't find the container with id 084f3895a5900142abb311b13d31433aafb105fcf2916f58971736185e6bc5e9 Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.345960 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/40d01e5b-1274-48a1-8510-4386dd7150bb-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-ps4tp\" (UID: \"40d01e5b-1274-48a1-8510-4386dd7150bb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.349088 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/40d01e5b-1274-48a1-8510-4386dd7150bb-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-ps4tp\" (UID: \"40d01e5b-1274-48a1-8510-4386dd7150bb\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.528581 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx"] Sep 29 19:09:06 crc kubenswrapper[4792]: W0929 19:09:06.535498 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66a5aaa8_8ce1_4d34_a58e_843ff50ca9ef.slice/crio-47aebca1ed54abf2976f3d703eaf99cfd6f1edbce94991ff7a89a6647757532d WatchSource:0}: Error finding container 47aebca1ed54abf2976f3d703eaf99cfd6f1edbce94991ff7a89a6647757532d: Status 404 returned error can't find the container with id 47aebca1ed54abf2976f3d703eaf99cfd6f1edbce94991ff7a89a6647757532d Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.599412 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl" event={"ID":"a1f9e458-fc58-4f84-89fc-9196c747d6ba","Type":"ContainerStarted","Data":"084f3895a5900142abb311b13d31433aafb105fcf2916f58971736185e6bc5e9"} Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.600119 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-w7wwz" event={"ID":"7d8ab4ec-b506-4549-be62-9b914b9cb3f3","Type":"ContainerStarted","Data":"0a215f482e364e7ce38c00c0113cf517f777cadc6764e0cf3acb14e699036b61"} Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.601157 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" event={"ID":"66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef","Type":"ContainerStarted","Data":"47aebca1ed54abf2976f3d703eaf99cfd6f1edbce94991ff7a89a6647757532d"} Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.606691 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" Sep 29 19:09:06 crc kubenswrapper[4792]: I0929 19:09:06.655605 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-c6d85c744-pz6sl"] Sep 29 19:09:06 crc kubenswrapper[4792]: W0929 19:09:06.683264 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0784279a_cf17_4a52_a404_bcdf82e83b4c.slice/crio-96e10fc5c43db9d46504f6c174df9fdf7e718dd86b21a56a0221519107d0cda6 WatchSource:0}: Error finding container 96e10fc5c43db9d46504f6c174df9fdf7e718dd86b21a56a0221519107d0cda6: Status 404 returned error can't find the container with id 96e10fc5c43db9d46504f6c174df9fdf7e718dd86b21a56a0221519107d0cda6 Sep 29 19:09:07 crc kubenswrapper[4792]: I0929 19:09:07.025655 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp"] Sep 29 19:09:07 crc kubenswrapper[4792]: I0929 19:09:07.607915 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-c6d85c744-pz6sl" event={"ID":"0784279a-cf17-4a52-a404-bcdf82e83b4c","Type":"ContainerStarted","Data":"78ca337692124a8c547461c20cd363ace59d1dcb251d4f6c5d91b92ee1752b7b"} Sep 29 19:09:07 crc kubenswrapper[4792]: I0929 19:09:07.608242 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-c6d85c744-pz6sl" event={"ID":"0784279a-cf17-4a52-a404-bcdf82e83b4c","Type":"ContainerStarted","Data":"96e10fc5c43db9d46504f6c174df9fdf7e718dd86b21a56a0221519107d0cda6"} Sep 29 19:09:07 crc kubenswrapper[4792]: I0929 19:09:07.609180 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" event={"ID":"40d01e5b-1274-48a1-8510-4386dd7150bb","Type":"ContainerStarted","Data":"fc3f931bb7f1ce52383a24f9efe92c29cf090ec1db24810fdd5a720499427d75"} Sep 29 19:09:07 crc kubenswrapper[4792]: I0929 19:09:07.786391 4792 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 19:09:08 crc kubenswrapper[4792]: I0929 19:09:08.634711 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-c6d85c744-pz6sl" podStartSLOduration=3.634693613 podStartE2EDuration="3.634693613s" podCreationTimestamp="2025-09-29 19:09:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:08.634122507 +0000 UTC m=+760.627429913" watchObservedRunningTime="2025-09-29 19:09:08.634693613 +0000 UTC m=+760.628001009" Sep 29 19:09:10 crc kubenswrapper[4792]: I0929 19:09:10.631238 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" event={"ID":"66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef","Type":"ContainerStarted","Data":"06bd995b65a330d78718d1d438b808dbf2d8f43d5dc42a3f28e7b0bd3f807412"} Sep 29 19:09:10 crc kubenswrapper[4792]: I0929 19:09:10.631705 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" Sep 29 19:09:10 crc kubenswrapper[4792]: I0929 19:09:10.632454 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl" event={"ID":"a1f9e458-fc58-4f84-89fc-9196c747d6ba","Type":"ContainerStarted","Data":"3ff733ea31f76fa3389c553b353eeebeaff31ecba57e40e0c6b24878ef06232e"} 
Sep 29 19:09:10 crc kubenswrapper[4792]: I0929 19:09:10.633858 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-w7wwz" event={"ID":"7d8ab4ec-b506-4549-be62-9b914b9cb3f3","Type":"ContainerStarted","Data":"8ad9684ba3c9c5e5db5de87d4a4d18d9ee45fdf1ffbee4292e04a6683e2ceafe"}
Sep 29 19:09:10 crc kubenswrapper[4792]: I0929 19:09:10.634133 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-w7wwz"
Sep 29 19:09:10 crc kubenswrapper[4792]: I0929 19:09:10.636967 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" event={"ID":"40d01e5b-1274-48a1-8510-4386dd7150bb","Type":"ContainerStarted","Data":"eb731ae03b607c1be0c7175293da85c990344f40b63e9f128ba1c3b9d90fd98e"}
Sep 29 19:09:10 crc kubenswrapper[4792]: I0929 19:09:10.647204 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx" podStartSLOduration=2.087750404 podStartE2EDuration="5.647187637s" podCreationTimestamp="2025-09-29 19:09:05 +0000 UTC" firstStartedPulling="2025-09-29 19:09:06.537912914 +0000 UTC m=+758.531220310" lastFinishedPulling="2025-09-29 19:09:10.097350147 +0000 UTC m=+762.090657543" observedRunningTime="2025-09-29 19:09:10.646306643 +0000 UTC m=+762.639614059" watchObservedRunningTime="2025-09-29 19:09:10.647187637 +0000 UTC m=+762.640495033"
Sep 29 19:09:10 crc kubenswrapper[4792]: I0929 19:09:10.678105 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-ps4tp" podStartSLOduration=2.621242787 podStartE2EDuration="5.678083692s" podCreationTimestamp="2025-09-29 19:09:05 +0000 UTC" firstStartedPulling="2025-09-29 19:09:07.041200301 +0000 UTC m=+759.034507697" lastFinishedPulling="2025-09-29 19:09:10.098041206 +0000 UTC m=+762.091348602" observedRunningTime="2025-09-29 19:09:10.665419056 +0000 UTC m=+762.658726452" watchObservedRunningTime="2025-09-29 19:09:10.678083692 +0000 UTC m=+762.671391088"
Sep 29 19:09:10 crc kubenswrapper[4792]: I0929 19:09:10.698177 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-w7wwz" podStartSLOduration=1.526151282 podStartE2EDuration="5.698158771s" podCreationTimestamp="2025-09-29 19:09:05 +0000 UTC" firstStartedPulling="2025-09-29 19:09:05.941957493 +0000 UTC m=+757.935264889" lastFinishedPulling="2025-09-29 19:09:10.113964982 +0000 UTC m=+762.107272378" observedRunningTime="2025-09-29 19:09:10.696764503 +0000 UTC m=+762.690071899" watchObservedRunningTime="2025-09-29 19:09:10.698158771 +0000 UTC m=+762.691466177"
Sep 29 19:09:12 crc kubenswrapper[4792]: I0929 19:09:12.648965 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl" event={"ID":"a1f9e458-fc58-4f84-89fc-9196c747d6ba","Type":"ContainerStarted","Data":"0be8e7cf673de6f26bec2382ed91d75b02eb487ce126622e23c31824699033ba"}
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.213627 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-kjrzl" podStartSLOduration=2.23420916 podStartE2EDuration="8.213610396s" podCreationTimestamp="2025-09-29 19:09:05 +0000 UTC" firstStartedPulling="2025-09-29 19:09:06.321116388 +0000 UTC m=+758.314423774" lastFinishedPulling="2025-09-29 19:09:12.300517614 +0000 UTC m=+764.293825010" observedRunningTime="2025-09-29 19:09:12.663756564 +0000 UTC m=+764.657063980" watchObservedRunningTime="2025-09-29 19:09:13.213610396 +0000 UTC m=+765.206917792"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.213823 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vmlff"]
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.214953 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.235794 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vmlff"]
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.331521 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-utilities\") pod \"redhat-marketplace-vmlff\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") " pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.331582 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q74s\" (UniqueName: \"kubernetes.io/projected/78fca59f-c5f2-4558-ab07-122008a02f73-kube-api-access-7q74s\") pod \"redhat-marketplace-vmlff\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") " pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.331627 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-catalog-content\") pod \"redhat-marketplace-vmlff\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") " pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.433308 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-utilities\") pod \"redhat-marketplace-vmlff\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") " pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.433353 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q74s\" (UniqueName: \"kubernetes.io/projected/78fca59f-c5f2-4558-ab07-122008a02f73-kube-api-access-7q74s\") pod \"redhat-marketplace-vmlff\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") " pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.433375 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-catalog-content\") pod \"redhat-marketplace-vmlff\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") " pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.433782 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-catalog-content\") pod \"redhat-marketplace-vmlff\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") " pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.433972 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-utilities\") pod \"redhat-marketplace-vmlff\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") " pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.456843 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q74s\" (UniqueName: \"kubernetes.io/projected/78fca59f-c5f2-4558-ab07-122008a02f73-kube-api-access-7q74s\") pod \"redhat-marketplace-vmlff\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") " pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.527223 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:13 crc kubenswrapper[4792]: I0929 19:09:13.932239 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vmlff"]
Sep 29 19:09:14 crc kubenswrapper[4792]: I0929 19:09:14.662398 4792 generic.go:334] "Generic (PLEG): container finished" podID="78fca59f-c5f2-4558-ab07-122008a02f73" containerID="924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869" exitCode=0
Sep 29 19:09:14 crc kubenswrapper[4792]: I0929 19:09:14.662525 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmlff" event={"ID":"78fca59f-c5f2-4558-ab07-122008a02f73","Type":"ContainerDied","Data":"924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869"}
Sep 29 19:09:14 crc kubenswrapper[4792]: I0929 19:09:14.662669 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmlff" event={"ID":"78fca59f-c5f2-4558-ab07-122008a02f73","Type":"ContainerStarted","Data":"38d9684d1d8adb6e9ddae794bdbec2cb290f0cd33531e20868f169a2a61f9e93"}
Sep 29 19:09:15 crc kubenswrapper[4792]: I0929 19:09:15.908900 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-w7wwz"
Sep 29 19:09:16 crc kubenswrapper[4792]: I0929 19:09:16.219111 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-c6d85c744-pz6sl"
Sep 29 19:09:16 crc kubenswrapper[4792]: I0929 19:09:16.219153 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-c6d85c744-pz6sl"
Sep 29 19:09:16 crc kubenswrapper[4792]: I0929 19:09:16.225097 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-c6d85c744-pz6sl"
Sep 29 19:09:16 crc kubenswrapper[4792]: I0929 19:09:16.674541 4792 generic.go:334] "Generic (PLEG): container finished" podID="78fca59f-c5f2-4558-ab07-122008a02f73" containerID="f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79" exitCode=0
Sep 29 19:09:16 crc kubenswrapper[4792]: I0929 19:09:16.674609 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmlff" event={"ID":"78fca59f-c5f2-4558-ab07-122008a02f73","Type":"ContainerDied","Data":"f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79"}
Sep 29 19:09:16 crc kubenswrapper[4792]: I0929 19:09:16.684914 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-c6d85c744-pz6sl"
Sep 29 19:09:16 crc kubenswrapper[4792]: I0929 19:09:16.756629 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-7pt7w"]
Sep 29 19:09:17 crc kubenswrapper[4792]: I0929 19:09:17.682767 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmlff" event={"ID":"78fca59f-c5f2-4558-ab07-122008a02f73","Type":"ContainerStarted","Data":"307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6"}
Sep 29 19:09:17 crc kubenswrapper[4792]: I0929 19:09:17.707310 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vmlff" podStartSLOduration=2.029843714 podStartE2EDuration="4.707287557s" podCreationTimestamp="2025-09-29 19:09:13 +0000 UTC" firstStartedPulling="2025-09-29 19:09:14.664008314 +0000 UTC m=+766.657315710" lastFinishedPulling="2025-09-29 19:09:17.341452157 +0000 UTC m=+769.334759553" observedRunningTime="2025-09-29 19:09:17.705253951 +0000 UTC m=+769.698561377" watchObservedRunningTime="2025-09-29 19:09:17.707287557 +0000 UTC m=+769.700594953"
Sep 29 19:09:23 crc kubenswrapper[4792]: I0929 19:09:23.528341 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:23 crc kubenswrapper[4792]: I0929 19:09:23.529075 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:23 crc kubenswrapper[4792]: I0929 19:09:23.599788 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:23 crc kubenswrapper[4792]: I0929 19:09:23.753423 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:23 crc kubenswrapper[4792]: I0929 19:09:23.832222 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vmlff"]
Sep 29 19:09:25 crc kubenswrapper[4792]: I0929 19:09:25.728341 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vmlff" podUID="78fca59f-c5f2-4558-ab07-122008a02f73" containerName="registry-server" containerID="cri-o://307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6" gracePeriod=2
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.165300 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-9gcgx"
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.267876 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.314757 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-catalog-content\") pod \"78fca59f-c5f2-4558-ab07-122008a02f73\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") "
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.314831 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-utilities\") pod \"78fca59f-c5f2-4558-ab07-122008a02f73\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") "
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.314885 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7q74s\" (UniqueName: \"kubernetes.io/projected/78fca59f-c5f2-4558-ab07-122008a02f73-kube-api-access-7q74s\") pod \"78fca59f-c5f2-4558-ab07-122008a02f73\" (UID: \"78fca59f-c5f2-4558-ab07-122008a02f73\") "
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.316408 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-utilities" (OuterVolumeSpecName: "utilities") pod "78fca59f-c5f2-4558-ab07-122008a02f73" (UID: "78fca59f-c5f2-4558-ab07-122008a02f73"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.327256 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78fca59f-c5f2-4558-ab07-122008a02f73" (UID: "78fca59f-c5f2-4558-ab07-122008a02f73"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.333095 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78fca59f-c5f2-4558-ab07-122008a02f73-kube-api-access-7q74s" (OuterVolumeSpecName: "kube-api-access-7q74s") pod "78fca59f-c5f2-4558-ab07-122008a02f73" (UID: "78fca59f-c5f2-4558-ab07-122008a02f73"). InnerVolumeSpecName "kube-api-access-7q74s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.416146 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.416194 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7q74s\" (UniqueName: \"kubernetes.io/projected/78fca59f-c5f2-4558-ab07-122008a02f73-kube-api-access-7q74s\") on node \"crc\" DevicePath \"\""
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.416205 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78fca59f-c5f2-4558-ab07-122008a02f73-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.735051 4792 generic.go:334] "Generic (PLEG): container finished" podID="78fca59f-c5f2-4558-ab07-122008a02f73" containerID="307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6" exitCode=0
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.735104 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmlff" event={"ID":"78fca59f-c5f2-4558-ab07-122008a02f73","Type":"ContainerDied","Data":"307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6"}
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.735160 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vmlff"
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.735222 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmlff" event={"ID":"78fca59f-c5f2-4558-ab07-122008a02f73","Type":"ContainerDied","Data":"38d9684d1d8adb6e9ddae794bdbec2cb290f0cd33531e20868f169a2a61f9e93"}
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.735252 4792 scope.go:117] "RemoveContainer" containerID="307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6"
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.753034 4792 scope.go:117] "RemoveContainer" containerID="f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79"
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.809574 4792 scope.go:117] "RemoveContainer" containerID="924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869"
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.812238 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vmlff"]
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.815460 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vmlff"]
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.825871 4792 scope.go:117] "RemoveContainer" containerID="307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6"
Sep 29 19:09:26 crc kubenswrapper[4792]: E0929 19:09:26.826289 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6\": container with ID starting with 307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6 not found: ID does not exist" containerID="307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6"
Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.826322 4792
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6"} err="failed to get container status \"307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6\": rpc error: code = NotFound desc = could not find container \"307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6\": container with ID starting with 307215ec37ae63dd5bd199e3a4c4d12699c24f26bc77e7dcf46cbc6e4af924c6 not found: ID does not exist" Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.826343 4792 scope.go:117] "RemoveContainer" containerID="f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79" Sep 29 19:09:26 crc kubenswrapper[4792]: E0929 19:09:26.826701 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79\": container with ID starting with f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79 not found: ID does not exist" containerID="f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79" Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.826742 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79"} err="failed to get container status \"f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79\": rpc error: code = NotFound desc = could not find container \"f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79\": container with ID starting with f3b93d967869cd6ba661cd301d3da592b075c1491da570987ac7570db4accd79 not found: ID does not exist" Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.826758 4792 scope.go:117] "RemoveContainer" containerID="924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869" Sep 29 19:09:26 crc kubenswrapper[4792]: E0929 19:09:26.827046 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869\": container with ID starting with 924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869 not found: ID does not exist" containerID="924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869" Sep 29 19:09:26 crc kubenswrapper[4792]: I0929 19:09:26.827066 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869"} err="failed to get container status \"924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869\": rpc error: code = NotFound desc = could not find container \"924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869\": container with ID starting with 924c78e0a56dc9493fd5a5e33cf3d9119dbe92883892ea46caae1b3afce17869 not found: ID does not exist" Sep 29 19:09:27 crc kubenswrapper[4792]: I0929 19:09:27.023341 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78fca59f-c5f2-4558-ab07-122008a02f73" path="/var/lib/kubelet/pods/78fca59f-c5f2-4558-ab07-122008a02f73/volumes" Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.929032 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv"] Sep 29 19:09:37 crc kubenswrapper[4792]: E0929 19:09:37.929745 4792 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="78fca59f-c5f2-4558-ab07-122008a02f73" containerName="registry-server" Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.929760 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="78fca59f-c5f2-4558-ab07-122008a02f73" containerName="registry-server" Sep 29 19:09:37 crc kubenswrapper[4792]: E0929 19:09:37.929781 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78fca59f-c5f2-4558-ab07-122008a02f73" containerName="extract-utilities" Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.929788 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="78fca59f-c5f2-4558-ab07-122008a02f73" containerName="extract-utilities" Sep 29 19:09:37 crc kubenswrapper[4792]: E0929 19:09:37.929802 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78fca59f-c5f2-4558-ab07-122008a02f73" containerName="extract-content" Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.929810 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="78fca59f-c5f2-4558-ab07-122008a02f73" containerName="extract-content" Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.929953 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="78fca59f-c5f2-4558-ab07-122008a02f73" containerName="registry-server" Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.930798 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.933251 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.948033 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv"] Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.977064 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtkgt\" (UniqueName: \"kubernetes.io/projected/d52e4791-ce38-457b-a2b2-83e5a4f491ab-kube-api-access-vtkgt\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.977134 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:37 crc kubenswrapper[4792]: I0929 19:09:37.977291 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:38 crc kubenswrapper[4792]: I0929 19:09:38.078239 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:38 crc kubenswrapper[4792]: I0929 19:09:38.078317 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtkgt\" (UniqueName: \"kubernetes.io/projected/d52e4791-ce38-457b-a2b2-83e5a4f491ab-kube-api-access-vtkgt\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:38 crc kubenswrapper[4792]: I0929 19:09:38.078349 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:38 crc kubenswrapper[4792]: I0929 19:09:38.078758 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:38 crc kubenswrapper[4792]: I0929 19:09:38.078825 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:38 crc kubenswrapper[4792]: I0929 19:09:38.096570 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtkgt\" (UniqueName: \"kubernetes.io/projected/d52e4791-ce38-457b-a2b2-83e5a4f491ab-kube-api-access-vtkgt\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:38 crc kubenswrapper[4792]: I0929 19:09:38.247592 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:38 crc kubenswrapper[4792]: I0929 19:09:38.670073 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv"] Sep 29 19:09:38 crc kubenswrapper[4792]: I0929 19:09:38.807274 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" event={"ID":"d52e4791-ce38-457b-a2b2-83e5a4f491ab","Type":"ContainerStarted","Data":"4a72aea4893bb1936a75c9f23b6d6c5505309f89fcce843b0a96d034c644e9be"} Sep 29 19:09:38 crc kubenswrapper[4792]: I0929 19:09:38.807321 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" event={"ID":"d52e4791-ce38-457b-a2b2-83e5a4f491ab","Type":"ContainerStarted","Data":"3ad36259f49962392e6709589f44ab1f46020bc04f84062734036d28c724e37d"} Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.676030 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zx66r"] Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.677391 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.680606 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zx66r"] Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.802435 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gvlp\" (UniqueName: \"kubernetes.io/projected/af532e48-fb5d-4167-8ead-6a8e82497877-kube-api-access-8gvlp\") pod \"redhat-operators-zx66r\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.803270 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-catalog-content\") pod \"redhat-operators-zx66r\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.803462 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-utilities\") pod \"redhat-operators-zx66r\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.814278 4792 generic.go:334] "Generic (PLEG): container finished" podID="d52e4791-ce38-457b-a2b2-83e5a4f491ab" containerID="4a72aea4893bb1936a75c9f23b6d6c5505309f89fcce843b0a96d034c644e9be" exitCode=0 Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.814409 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" event={"ID":"d52e4791-ce38-457b-a2b2-83e5a4f491ab","Type":"ContainerDied","Data":"4a72aea4893bb1936a75c9f23b6d6c5505309f89fcce843b0a96d034c644e9be"} Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.904164 4792 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-8gvlp\" (UniqueName: \"kubernetes.io/projected/af532e48-fb5d-4167-8ead-6a8e82497877-kube-api-access-8gvlp\") pod \"redhat-operators-zx66r\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.904218 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-catalog-content\") pod \"redhat-operators-zx66r\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.904275 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-utilities\") pod \"redhat-operators-zx66r\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.904702 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-catalog-content\") pod \"redhat-operators-zx66r\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.905552 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-utilities\") pod \"redhat-operators-zx66r\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:39 crc kubenswrapper[4792]: I0929 19:09:39.937143 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gvlp\" (UniqueName: \"kubernetes.io/projected/af532e48-fb5d-4167-8ead-6a8e82497877-kube-api-access-8gvlp\") pod \"redhat-operators-zx66r\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:40 crc kubenswrapper[4792]: I0929 19:09:40.004576 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:40 crc kubenswrapper[4792]: I0929 19:09:40.416581 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zx66r"] Sep 29 19:09:40 crc kubenswrapper[4792]: W0929 19:09:40.427408 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf532e48_fb5d_4167_8ead_6a8e82497877.slice/crio-70c8aaa2f2cc420cacd565de09d3ae47134c619b3bfc6e8b35631679a780ecb9 WatchSource:0}: Error finding container 70c8aaa2f2cc420cacd565de09d3ae47134c619b3bfc6e8b35631679a780ecb9: Status 404 returned error can't find the container with id 70c8aaa2f2cc420cacd565de09d3ae47134c619b3bfc6e8b35631679a780ecb9 Sep 29 19:09:40 crc kubenswrapper[4792]: I0929 19:09:40.820452 4792 generic.go:334] "Generic (PLEG): container finished" podID="af532e48-fb5d-4167-8ead-6a8e82497877" containerID="99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49" exitCode=0 Sep 29 19:09:40 crc kubenswrapper[4792]: I0929 19:09:40.820652 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx66r" event={"ID":"af532e48-fb5d-4167-8ead-6a8e82497877","Type":"ContainerDied","Data":"99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49"} Sep 29 19:09:40 crc kubenswrapper[4792]: I0929 19:09:40.820756 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx66r" event={"ID":"af532e48-fb5d-4167-8ead-6a8e82497877","Type":"ContainerStarted","Data":"70c8aaa2f2cc420cacd565de09d3ae47134c619b3bfc6e8b35631679a780ecb9"} Sep 29 19:09:41 crc kubenswrapper[4792]: I0929 19:09:41.796593 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-7pt7w" podUID="577a8444-c6e3-4aae-922e-12c7cb3b0b11" containerName="console" containerID="cri-o://1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f" gracePeriod=15 Sep 29 19:09:41 crc kubenswrapper[4792]: I0929 19:09:41.959186 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:09:41 crc kubenswrapper[4792]: I0929 19:09:41.959495 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.406267 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-7pt7w_577a8444-c6e3-4aae-922e-12c7cb3b0b11/console/0.log" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.406325 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.533206 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-serving-cert\") pod \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.533253 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-trusted-ca-bundle\") pod \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.533298 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-oauth-serving-cert\") pod \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.533328 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hdqg\" (UniqueName: \"kubernetes.io/projected/577a8444-c6e3-4aae-922e-12c7cb3b0b11-kube-api-access-4hdqg\") pod \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.533343 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-oauth-config\") pod \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.533358 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-service-ca\") pod \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.533392 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-config\") pod \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\" (UID: \"577a8444-c6e3-4aae-922e-12c7cb3b0b11\") " Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.533824 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "577a8444-c6e3-4aae-922e-12c7cb3b0b11" (UID: "577a8444-c6e3-4aae-922e-12c7cb3b0b11"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.533901 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-config" (OuterVolumeSpecName: "console-config") pod "577a8444-c6e3-4aae-922e-12c7cb3b0b11" (UID: "577a8444-c6e3-4aae-922e-12c7cb3b0b11"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.534117 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "577a8444-c6e3-4aae-922e-12c7cb3b0b11" (UID: "577a8444-c6e3-4aae-922e-12c7cb3b0b11"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.534343 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-service-ca" (OuterVolumeSpecName: "service-ca") pod "577a8444-c6e3-4aae-922e-12c7cb3b0b11" (UID: "577a8444-c6e3-4aae-922e-12c7cb3b0b11"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.538117 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "577a8444-c6e3-4aae-922e-12c7cb3b0b11" (UID: "577a8444-c6e3-4aae-922e-12c7cb3b0b11"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.538314 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/577a8444-c6e3-4aae-922e-12c7cb3b0b11-kube-api-access-4hdqg" (OuterVolumeSpecName: "kube-api-access-4hdqg") pod "577a8444-c6e3-4aae-922e-12c7cb3b0b11" (UID: "577a8444-c6e3-4aae-922e-12c7cb3b0b11"). InnerVolumeSpecName "kube-api-access-4hdqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.538988 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "577a8444-c6e3-4aae-922e-12c7cb3b0b11" (UID: "577a8444-c6e3-4aae-922e-12c7cb3b0b11"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.635286 4792 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.635530 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.635613 4792 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.635697 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hdqg\" (UniqueName: \"kubernetes.io/projected/577a8444-c6e3-4aae-922e-12c7cb3b0b11-kube-api-access-4hdqg\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.635781 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.635914 4792 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.636405 4792 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/577a8444-c6e3-4aae-922e-12c7cb3b0b11-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.833111 4792 generic.go:334] "Generic (PLEG): container finished" podID="d52e4791-ce38-457b-a2b2-83e5a4f491ab" containerID="50ebc5b455363a0cda96bf03dfe5bb92285d18b52b6e345f53f4de02e7011684" exitCode=0 Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.833178 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" event={"ID":"d52e4791-ce38-457b-a2b2-83e5a4f491ab","Type":"ContainerDied","Data":"50ebc5b455363a0cda96bf03dfe5bb92285d18b52b6e345f53f4de02e7011684"} Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.834354 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-7pt7w_577a8444-c6e3-4aae-922e-12c7cb3b0b11/console/0.log" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.834376 4792 generic.go:334] "Generic (PLEG): container finished" podID="577a8444-c6e3-4aae-922e-12c7cb3b0b11" containerID="1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f" exitCode=2 Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.834407 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7pt7w" event={"ID":"577a8444-c6e3-4aae-922e-12c7cb3b0b11","Type":"ContainerDied","Data":"1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f"} Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.834422 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7pt7w" 
event={"ID":"577a8444-c6e3-4aae-922e-12c7cb3b0b11","Type":"ContainerDied","Data":"6106f5fd9165a2bdf2320371319e6c8d2d61ab4e1e6c4a33baa1a28e032ba099"} Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.834437 4792 scope.go:117] "RemoveContainer" containerID="1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.834527 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-7pt7w" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.839153 4792 generic.go:334] "Generic (PLEG): container finished" podID="af532e48-fb5d-4167-8ead-6a8e82497877" containerID="30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93" exitCode=0 Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.839189 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx66r" event={"ID":"af532e48-fb5d-4167-8ead-6a8e82497877","Type":"ContainerDied","Data":"30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93"} Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.872367 4792 scope.go:117] "RemoveContainer" containerID="1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f" Sep 29 19:09:42 crc kubenswrapper[4792]: E0929 19:09:42.873389 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f\": container with ID starting with 1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f not found: ID does not exist" containerID="1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.873438 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f"} err="failed to get container status \"1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f\": rpc error: code = NotFound desc = could not find container \"1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f\": container with ID starting with 1ec6d316d0d34bf6dc311af24fdcf882f67e1166daf49cc16e49038e1c70aa4f not found: ID does not exist" Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.896241 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-7pt7w"] Sep 29 19:09:42 crc kubenswrapper[4792]: I0929 19:09:42.899799 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-7pt7w"] Sep 29 19:09:43 crc kubenswrapper[4792]: I0929 19:09:43.022616 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="577a8444-c6e3-4aae-922e-12c7cb3b0b11" path="/var/lib/kubelet/pods/577a8444-c6e3-4aae-922e-12c7cb3b0b11/volumes" Sep 29 19:09:43 crc kubenswrapper[4792]: I0929 19:09:43.845869 4792 generic.go:334] "Generic (PLEG): container finished" podID="d52e4791-ce38-457b-a2b2-83e5a4f491ab" containerID="959e0a66ac29145af783aaa22bcebff5b2b2282fc98bcc3277db6962620ca3fb" exitCode=0 Sep 29 19:09:43 crc kubenswrapper[4792]: I0929 19:09:43.845935 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" event={"ID":"d52e4791-ce38-457b-a2b2-83e5a4f491ab","Type":"ContainerDied","Data":"959e0a66ac29145af783aaa22bcebff5b2b2282fc98bcc3277db6962620ca3fb"} Sep 29 19:09:44 crc 
kubenswrapper[4792]: I0929 19:09:44.857682 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx66r" event={"ID":"af532e48-fb5d-4167-8ead-6a8e82497877","Type":"ContainerStarted","Data":"615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b"} Sep 29 19:09:44 crc kubenswrapper[4792]: I0929 19:09:44.890974 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zx66r" podStartSLOduration=3.059452306 podStartE2EDuration="5.890939933s" podCreationTimestamp="2025-09-29 19:09:39 +0000 UTC" firstStartedPulling="2025-09-29 19:09:40.822158392 +0000 UTC m=+792.815465798" lastFinishedPulling="2025-09-29 19:09:43.653646029 +0000 UTC m=+795.646953425" observedRunningTime="2025-09-29 19:09:44.880886147 +0000 UTC m=+796.874193563" watchObservedRunningTime="2025-09-29 19:09:44.890939933 +0000 UTC m=+796.884247369" Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.145205 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.267980 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtkgt\" (UniqueName: \"kubernetes.io/projected/d52e4791-ce38-457b-a2b2-83e5a4f491ab-kube-api-access-vtkgt\") pod \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.268048 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-bundle\") pod \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.268081 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-util\") pod \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\" (UID: \"d52e4791-ce38-457b-a2b2-83e5a4f491ab\") " Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.268748 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-bundle" (OuterVolumeSpecName: "bundle") pod "d52e4791-ce38-457b-a2b2-83e5a4f491ab" (UID: "d52e4791-ce38-457b-a2b2-83e5a4f491ab"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.268964 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.277374 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-util" (OuterVolumeSpecName: "util") pod "d52e4791-ce38-457b-a2b2-83e5a4f491ab" (UID: "d52e4791-ce38-457b-a2b2-83e5a4f491ab"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.282073 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d52e4791-ce38-457b-a2b2-83e5a4f491ab-kube-api-access-vtkgt" (OuterVolumeSpecName: "kube-api-access-vtkgt") pod "d52e4791-ce38-457b-a2b2-83e5a4f491ab" (UID: "d52e4791-ce38-457b-a2b2-83e5a4f491ab"). InnerVolumeSpecName "kube-api-access-vtkgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.370196 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d52e4791-ce38-457b-a2b2-83e5a4f491ab-util\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.370244 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtkgt\" (UniqueName: \"kubernetes.io/projected/d52e4791-ce38-457b-a2b2-83e5a4f491ab-kube-api-access-vtkgt\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.874931 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.874931 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv" event={"ID":"d52e4791-ce38-457b-a2b2-83e5a4f491ab","Type":"ContainerDied","Data":"3ad36259f49962392e6709589f44ab1f46020bc04f84062734036d28c724e37d"} Sep 29 19:09:45 crc kubenswrapper[4792]: I0929 19:09:45.874975 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ad36259f49962392e6709589f44ab1f46020bc04f84062734036d28c724e37d" Sep 29 19:09:50 crc kubenswrapper[4792]: I0929 19:09:50.004893 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:50 crc kubenswrapper[4792]: I0929 19:09:50.006532 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:50 crc kubenswrapper[4792]: I0929 19:09:50.064569 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:50 crc kubenswrapper[4792]: I0929 19:09:50.941274 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.259590 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zx66r"] Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.466896 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nd494"] Sep 29 19:09:53 crc kubenswrapper[4792]: E0929 19:09:53.467123 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="577a8444-c6e3-4aae-922e-12c7cb3b0b11" containerName="console" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.467152 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="577a8444-c6e3-4aae-922e-12c7cb3b0b11" containerName="console" Sep 29 19:09:53 crc kubenswrapper[4792]: E0929 19:09:53.467171 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d52e4791-ce38-457b-a2b2-83e5a4f491ab" containerName="util" Sep 29 19:09:53 crc 
kubenswrapper[4792]: I0929 19:09:53.467178 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d52e4791-ce38-457b-a2b2-83e5a4f491ab" containerName="util" Sep 29 19:09:53 crc kubenswrapper[4792]: E0929 19:09:53.467218 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d52e4791-ce38-457b-a2b2-83e5a4f491ab" containerName="pull" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.467229 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d52e4791-ce38-457b-a2b2-83e5a4f491ab" containerName="pull" Sep 29 19:09:53 crc kubenswrapper[4792]: E0929 19:09:53.467240 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d52e4791-ce38-457b-a2b2-83e5a4f491ab" containerName="extract" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.467247 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d52e4791-ce38-457b-a2b2-83e5a4f491ab" containerName="extract" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.467367 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="d52e4791-ce38-457b-a2b2-83e5a4f491ab" containerName="extract" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.467386 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="577a8444-c6e3-4aae-922e-12c7cb3b0b11" containerName="console" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.468267 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.477472 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nd494"] Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.665506 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-utilities\") pod \"certified-operators-nd494\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.665569 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8c8r\" (UniqueName: \"kubernetes.io/projected/a3531622-6a61-49a4-8af2-6c13bc5d4603-kube-api-access-m8c8r\") pod \"certified-operators-nd494\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.665709 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-catalog-content\") pod \"certified-operators-nd494\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.766694 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-catalog-content\") pod \"certified-operators-nd494\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.766801 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-utilities\") pod \"certified-operators-nd494\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.766829 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8c8r\" (UniqueName: \"kubernetes.io/projected/a3531622-6a61-49a4-8af2-6c13bc5d4603-kube-api-access-m8c8r\") pod \"certified-operators-nd494\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.767185 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-catalog-content\") pod \"certified-operators-nd494\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.767238 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-utilities\") pod \"certified-operators-nd494\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.798564 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8c8r\" (UniqueName: \"kubernetes.io/projected/a3531622-6a61-49a4-8af2-6c13bc5d4603-kube-api-access-m8c8r\") pod \"certified-operators-nd494\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:53 crc kubenswrapper[4792]: I0929 19:09:53.911569 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zx66r" podUID="af532e48-fb5d-4167-8ead-6a8e82497877" containerName="registry-server" containerID="cri-o://615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b" gracePeriod=2 Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.083418 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.339239 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.376248 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-utilities\") pod \"af532e48-fb5d-4167-8ead-6a8e82497877\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.376299 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gvlp\" (UniqueName: \"kubernetes.io/projected/af532e48-fb5d-4167-8ead-6a8e82497877-kube-api-access-8gvlp\") pod \"af532e48-fb5d-4167-8ead-6a8e82497877\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.376414 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-catalog-content\") pod \"af532e48-fb5d-4167-8ead-6a8e82497877\" (UID: \"af532e48-fb5d-4167-8ead-6a8e82497877\") " Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.377889 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-utilities" (OuterVolumeSpecName: "utilities") pod "af532e48-fb5d-4167-8ead-6a8e82497877" (UID: "af532e48-fb5d-4167-8ead-6a8e82497877"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.393159 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af532e48-fb5d-4167-8ead-6a8e82497877-kube-api-access-8gvlp" (OuterVolumeSpecName: "kube-api-access-8gvlp") pod "af532e48-fb5d-4167-8ead-6a8e82497877" (UID: "af532e48-fb5d-4167-8ead-6a8e82497877"). InnerVolumeSpecName "kube-api-access-8gvlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.479151 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.479179 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gvlp\" (UniqueName: \"kubernetes.io/projected/af532e48-fb5d-4167-8ead-6a8e82497877-kube-api-access-8gvlp\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.501309 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af532e48-fb5d-4167-8ead-6a8e82497877" (UID: "af532e48-fb5d-4167-8ead-6a8e82497877"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.580702 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af532e48-fb5d-4167-8ead-6a8e82497877-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.626253 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nd494"] Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.918761 4792 generic.go:334] "Generic (PLEG): container finished" podID="af532e48-fb5d-4167-8ead-6a8e82497877" containerID="615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b" exitCode=0 Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.918798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx66r" event={"ID":"af532e48-fb5d-4167-8ead-6a8e82497877","Type":"ContainerDied","Data":"615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b"} Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.919063 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx66r" event={"ID":"af532e48-fb5d-4167-8ead-6a8e82497877","Type":"ContainerDied","Data":"70c8aaa2f2cc420cacd565de09d3ae47134c619b3bfc6e8b35631679a780ecb9"} Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.918839 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zx66r" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.919081 4792 scope.go:117] "RemoveContainer" containerID="615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.922141 4792 generic.go:334] "Generic (PLEG): container finished" podID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerID="b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c" exitCode=0 Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.922181 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd494" event={"ID":"a3531622-6a61-49a4-8af2-6c13bc5d4603","Type":"ContainerDied","Data":"b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c"} Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.922208 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd494" event={"ID":"a3531622-6a61-49a4-8af2-6c13bc5d4603","Type":"ContainerStarted","Data":"122071fd78ca0383b534cb9ce207094990c3ac9a9c51cd0d48acc980db1beca5"} Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.931346 4792 scope.go:117] "RemoveContainer" containerID="30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.971903 4792 scope.go:117] "RemoveContainer" containerID="99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.988907 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zx66r"] Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.993914 4792 scope.go:117] "RemoveContainer" containerID="615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b" Sep 29 19:09:54 crc kubenswrapper[4792]: E0929 19:09:54.998363 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b\": container with ID starting with 615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b not found: ID does not exist" containerID="615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.998398 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b"} err="failed to get container status \"615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b\": rpc error: code = NotFound desc = could not find container \"615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b\": container with ID starting with 615e09fc6772dfc2227ab4667746dd4e820ad115ee9fd9c0b11e10bdc23aca0b not found: ID does not exist" Sep 29 19:09:54 crc kubenswrapper[4792]: I0929 19:09:54.998425 4792 scope.go:117] "RemoveContainer" containerID="30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93" Sep 29 19:09:55 crc kubenswrapper[4792]: E0929 19:09:55.000970 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93\": container with ID starting with 30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93 not found: ID does not exist" containerID="30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.001001 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93"} err="failed to get container status \"30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93\": rpc error: code = NotFound desc = could not find container \"30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93\": container with ID starting with 30a76b497aa1f7f2018d3614bc8ceeba022aec0f79b8c193def29969924e0f93 not found: ID does not exist" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.001022 4792 scope.go:117] "RemoveContainer" containerID="99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.005577 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zx66r"] Sep 29 19:09:55 crc kubenswrapper[4792]: E0929 19:09:55.007970 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49\": container with ID starting with 99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49 not found: ID does not exist" containerID="99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.008049 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49"} err="failed to get container status \"99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49\": rpc error: code = NotFound desc = could not find container \"99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49\": container with ID starting with 99587f1ccc5d25a98665a2c6da3817e696da9d6716b225a02758c90ff2400a49 not found: ID does not exist" Sep 29 19:09:55 crc 
kubenswrapper[4792]: I0929 19:09:55.022705 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af532e48-fb5d-4167-8ead-6a8e82497877" path="/var/lib/kubelet/pods/af532e48-fb5d-4167-8ead-6a8e82497877/volumes" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.929348 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd494" event={"ID":"a3531622-6a61-49a4-8af2-6c13bc5d4603","Type":"ContainerStarted","Data":"0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d"} Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.994763 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8"] Sep 29 19:09:55 crc kubenswrapper[4792]: E0929 19:09:55.995210 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af532e48-fb5d-4167-8ead-6a8e82497877" containerName="extract-content" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.995232 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="af532e48-fb5d-4167-8ead-6a8e82497877" containerName="extract-content" Sep 29 19:09:55 crc kubenswrapper[4792]: E0929 19:09:55.995249 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af532e48-fb5d-4167-8ead-6a8e82497877" containerName="extract-utilities" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.995258 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="af532e48-fb5d-4167-8ead-6a8e82497877" containerName="extract-utilities" Sep 29 19:09:55 crc kubenswrapper[4792]: E0929 19:09:55.995284 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af532e48-fb5d-4167-8ead-6a8e82497877" containerName="registry-server" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.995292 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="af532e48-fb5d-4167-8ead-6a8e82497877" containerName="registry-server" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.995402 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="af532e48-fb5d-4167-8ead-6a8e82497877" containerName="registry-server" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.995873 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.999252 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.999494 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.999731 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 29 19:09:55 crc kubenswrapper[4792]: I0929 19:09:55.999943 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-kj4fj" Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.000104 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.049446 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8"] Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.097893 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f81e5f81-aa7e-4c65-900e-9a5929ca038b-webhook-cert\") pod \"metallb-operator-controller-manager-798fbb9bbf-rvlk8\" (UID: \"f81e5f81-aa7e-4c65-900e-9a5929ca038b\") " pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8" Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.098021 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f81e5f81-aa7e-4c65-900e-9a5929ca038b-apiservice-cert\") pod \"metallb-operator-controller-manager-798fbb9bbf-rvlk8\" (UID: \"f81e5f81-aa7e-4c65-900e-9a5929ca038b\") " pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8" Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.098049 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfjjm\" (UniqueName: \"kubernetes.io/projected/f81e5f81-aa7e-4c65-900e-9a5929ca038b-kube-api-access-mfjjm\") pod \"metallb-operator-controller-manager-798fbb9bbf-rvlk8\" (UID: \"f81e5f81-aa7e-4c65-900e-9a5929ca038b\") " pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8" Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.199102 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f81e5f81-aa7e-4c65-900e-9a5929ca038b-apiservice-cert\") pod \"metallb-operator-controller-manager-798fbb9bbf-rvlk8\" (UID: \"f81e5f81-aa7e-4c65-900e-9a5929ca038b\") " pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8" Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.199611 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfjjm\" (UniqueName: \"kubernetes.io/projected/f81e5f81-aa7e-4c65-900e-9a5929ca038b-kube-api-access-mfjjm\") pod \"metallb-operator-controller-manager-798fbb9bbf-rvlk8\" (UID: \"f81e5f81-aa7e-4c65-900e-9a5929ca038b\") " pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8" Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.199692 
4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f81e5f81-aa7e-4c65-900e-9a5929ca038b-webhook-cert\") pod \"metallb-operator-controller-manager-798fbb9bbf-rvlk8\" (UID: \"f81e5f81-aa7e-4c65-900e-9a5929ca038b\") " pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.207588 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f81e5f81-aa7e-4c65-900e-9a5929ca038b-apiservice-cert\") pod \"metallb-operator-controller-manager-798fbb9bbf-rvlk8\" (UID: \"f81e5f81-aa7e-4c65-900e-9a5929ca038b\") " pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.207634 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f81e5f81-aa7e-4c65-900e-9a5929ca038b-webhook-cert\") pod \"metallb-operator-controller-manager-798fbb9bbf-rvlk8\" (UID: \"f81e5f81-aa7e-4c65-900e-9a5929ca038b\") " pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.246564 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfjjm\" (UniqueName: \"kubernetes.io/projected/f81e5f81-aa7e-4c65-900e-9a5929ca038b-kube-api-access-mfjjm\") pod \"metallb-operator-controller-manager-798fbb9bbf-rvlk8\" (UID: \"f81e5f81-aa7e-4c65-900e-9a5929ca038b\") " pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.312193 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.463499 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5888bf57-l785s"]
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.468606 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.473507 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-gh44l"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.473643 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.478123 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.499583 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5888bf57-l785s"]
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.605645 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19a31c23-7b44-4b0e-a627-1891480c5e03-webhook-cert\") pod \"metallb-operator-webhook-server-5888bf57-l785s\" (UID: \"19a31c23-7b44-4b0e-a627-1891480c5e03\") " pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.605704 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml4w8\" (UniqueName: \"kubernetes.io/projected/19a31c23-7b44-4b0e-a627-1891480c5e03-kube-api-access-ml4w8\") pod \"metallb-operator-webhook-server-5888bf57-l785s\" (UID: \"19a31c23-7b44-4b0e-a627-1891480c5e03\") " pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.605741 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19a31c23-7b44-4b0e-a627-1891480c5e03-apiservice-cert\") pod \"metallb-operator-webhook-server-5888bf57-l785s\" (UID: \"19a31c23-7b44-4b0e-a627-1891480c5e03\") " pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.653961 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8"]
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.706343 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19a31c23-7b44-4b0e-a627-1891480c5e03-webhook-cert\") pod \"metallb-operator-webhook-server-5888bf57-l785s\" (UID: \"19a31c23-7b44-4b0e-a627-1891480c5e03\") " pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.706406 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml4w8\" (UniqueName: \"kubernetes.io/projected/19a31c23-7b44-4b0e-a627-1891480c5e03-kube-api-access-ml4w8\") pod \"metallb-operator-webhook-server-5888bf57-l785s\" (UID: \"19a31c23-7b44-4b0e-a627-1891480c5e03\") " pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.706441 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19a31c23-7b44-4b0e-a627-1891480c5e03-apiservice-cert\") pod \"metallb-operator-webhook-server-5888bf57-l785s\" (UID: \"19a31c23-7b44-4b0e-a627-1891480c5e03\") " pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.710867 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19a31c23-7b44-4b0e-a627-1891480c5e03-webhook-cert\") pod \"metallb-operator-webhook-server-5888bf57-l785s\" (UID: \"19a31c23-7b44-4b0e-a627-1891480c5e03\") " pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.711167 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19a31c23-7b44-4b0e-a627-1891480c5e03-apiservice-cert\") pod \"metallb-operator-webhook-server-5888bf57-l785s\" (UID: \"19a31c23-7b44-4b0e-a627-1891480c5e03\") " pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.732583 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml4w8\" (UniqueName: \"kubernetes.io/projected/19a31c23-7b44-4b0e-a627-1891480c5e03-kube-api-access-ml4w8\") pod \"metallb-operator-webhook-server-5888bf57-l785s\" (UID: \"19a31c23-7b44-4b0e-a627-1891480c5e03\") " pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.787434 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s"
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.945405 4792 generic.go:334] "Generic (PLEG): container finished" podID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerID="0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d" exitCode=0
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.945483 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd494" event={"ID":"a3531622-6a61-49a4-8af2-6c13bc5d4603","Type":"ContainerDied","Data":"0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d"}
Sep 29 19:09:56 crc kubenswrapper[4792]: I0929 19:09:56.947681 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8" event={"ID":"f81e5f81-aa7e-4c65-900e-9a5929ca038b","Type":"ContainerStarted","Data":"972fdc6f24de216495b852e79bde917560204508a0fff1317b22c6009e5e365a"}
Sep 29 19:09:57 crc kubenswrapper[4792]: W0929 19:09:57.228238 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19a31c23_7b44_4b0e_a627_1891480c5e03.slice/crio-52adce5af8e275b768add86d38ca6e805d5fbaf2c8c212b27d83bcd5598d7770 WatchSource:0}: Error finding container 52adce5af8e275b768add86d38ca6e805d5fbaf2c8c212b27d83bcd5598d7770: Status 404 returned error can't find the container with id 52adce5af8e275b768add86d38ca6e805d5fbaf2c8c212b27d83bcd5598d7770
Sep 29 19:09:57 crc kubenswrapper[4792]: I0929 19:09:57.230392 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5888bf57-l785s"]
Sep 29 19:09:57 crc kubenswrapper[4792]: I0929 19:09:57.954345 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s" event={"ID":"19a31c23-7b44-4b0e-a627-1891480c5e03","Type":"ContainerStarted","Data":"52adce5af8e275b768add86d38ca6e805d5fbaf2c8c212b27d83bcd5598d7770"}
Sep 29 19:09:57 crc kubenswrapper[4792]: I0929 19:09:57.957093 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd494" event={"ID":"a3531622-6a61-49a4-8af2-6c13bc5d4603","Type":"ContainerStarted","Data":"07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5"}
Sep 29 19:09:57 crc kubenswrapper[4792]: I0929 19:09:57.992142 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nd494" podStartSLOduration=2.170437582 podStartE2EDuration="4.992127571s" podCreationTimestamp="2025-09-29 19:09:53 +0000 UTC" firstStartedPulling="2025-09-29 19:09:54.923877292 +0000 UTC m=+806.917184688" lastFinishedPulling="2025-09-29 19:09:57.745567281 +0000 UTC m=+809.738874677" observedRunningTime="2025-09-29 19:09:57.989325049 +0000 UTC m=+809.982632455" watchObservedRunningTime="2025-09-29 19:09:57.992127571 +0000 UTC m=+809.985434967"
Sep 29 19:10:01 crc kubenswrapper[4792]: I0929 19:10:01.981651 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8" event={"ID":"f81e5f81-aa7e-4c65-900e-9a5929ca038b","Type":"ContainerStarted","Data":"e4413c78e0273a8b343ed961b1ee365375431932df4f6c54f2ec62c683da3621"}
Sep 29 19:10:01 crc kubenswrapper[4792]: I0929 19:10:01.983166 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8"
Sep 29 19:10:02 crc kubenswrapper[4792]: I0929 19:10:02.030086 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8" podStartSLOduration=2.540641912 podStartE2EDuration="7.030071697s" podCreationTimestamp="2025-09-29 19:09:55 +0000 UTC" firstStartedPulling="2025-09-29 19:09:56.673825434 +0000 UTC m=+808.667132830" lastFinishedPulling="2025-09-29 19:10:01.163255219 +0000 UTC m=+813.156562615" observedRunningTime="2025-09-29 19:10:02.023037378 +0000 UTC m=+814.016344774" watchObservedRunningTime="2025-09-29 19:10:02.030071697 +0000 UTC m=+814.023379093"
Sep 29 19:10:03 crc kubenswrapper[4792]: I0929 19:10:03.805937 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-px2xb"]
Sep 29 19:10:03 crc kubenswrapper[4792]: I0929 19:10:03.807393 4792 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:03 crc kubenswrapper[4792]: I0929 19:10:03.817328 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-px2xb"] Sep 29 19:10:03 crc kubenswrapper[4792]: I0929 19:10:03.913661 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-utilities\") pod \"community-operators-px2xb\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:03 crc kubenswrapper[4792]: I0929 19:10:03.913713 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cbg9\" (UniqueName: \"kubernetes.io/projected/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-kube-api-access-9cbg9\") pod \"community-operators-px2xb\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:03 crc kubenswrapper[4792]: I0929 19:10:03.913760 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-catalog-content\") pod \"community-operators-px2xb\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:03 crc kubenswrapper[4792]: I0929 19:10:03.996150 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s" event={"ID":"19a31c23-7b44-4b0e-a627-1891480c5e03","Type":"ContainerStarted","Data":"414a71475e3d01eaf6cbedeb7166e69401199c4003c26b33d2fee93e4d44e963"} Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.015124 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-utilities\") pod \"community-operators-px2xb\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.015168 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cbg9\" (UniqueName: \"kubernetes.io/projected/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-kube-api-access-9cbg9\") pod \"community-operators-px2xb\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.015204 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-catalog-content\") pod \"community-operators-px2xb\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.016027 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-utilities\") pod \"community-operators-px2xb\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.016245 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-catalog-content\") pod \"community-operators-px2xb\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.017008 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s" podStartSLOduration=1.739115006 podStartE2EDuration="8.016996074s" podCreationTimestamp="2025-09-29 19:09:56 +0000 UTC" firstStartedPulling="2025-09-29 19:09:57.231284452 +0000 UTC m=+809.224591838" lastFinishedPulling="2025-09-29 19:10:03.50916551 +0000 UTC m=+815.502472906" observedRunningTime="2025-09-29 19:10:04.015862275 +0000 UTC m=+816.009169691" watchObservedRunningTime="2025-09-29 19:10:04.016996074 +0000 UTC m=+816.010303470" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.043412 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cbg9\" (UniqueName: \"kubernetes.io/projected/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-kube-api-access-9cbg9\") pod \"community-operators-px2xb\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.083560 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.083619 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.122091 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.150430 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:10:04 crc kubenswrapper[4792]: I0929 19:10:04.669440 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-px2xb"] Sep 29 19:10:05 crc kubenswrapper[4792]: I0929 19:10:05.003564 4792 generic.go:334] "Generic (PLEG): container finished" podID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerID="daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d" exitCode=0 Sep 29 19:10:05 crc kubenswrapper[4792]: I0929 19:10:05.003664 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-px2xb" event={"ID":"41bba70b-b6b7-4bac-90cb-0ee5722dac4a","Type":"ContainerDied","Data":"daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d"} Sep 29 19:10:05 crc kubenswrapper[4792]: I0929 19:10:05.003706 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-px2xb" event={"ID":"41bba70b-b6b7-4bac-90cb-0ee5722dac4a","Type":"ContainerStarted","Data":"7fa5538750d50398147edf9df1b8f3702440073a4b37d66de6e6d3cb1091e181"} Sep 29 19:10:05 crc kubenswrapper[4792]: I0929 19:10:05.004914 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s" Sep 29 19:10:05 crc kubenswrapper[4792]: I0929 19:10:05.056021 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:10:06 crc kubenswrapper[4792]: I0929 19:10:06.391792 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nd494"] Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.015354 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nd494" podUID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerName="registry-server" containerID="cri-o://07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5" gracePeriod=2 Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.025798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-px2xb" event={"ID":"41bba70b-b6b7-4bac-90cb-0ee5722dac4a","Type":"ContainerStarted","Data":"0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae"} Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.418461 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.466263 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-catalog-content\") pod \"a3531622-6a61-49a4-8af2-6c13bc5d4603\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.474036 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8c8r\" (UniqueName: \"kubernetes.io/projected/a3531622-6a61-49a4-8af2-6c13bc5d4603-kube-api-access-m8c8r\") pod \"a3531622-6a61-49a4-8af2-6c13bc5d4603\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.474134 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-utilities\") pod \"a3531622-6a61-49a4-8af2-6c13bc5d4603\" (UID: \"a3531622-6a61-49a4-8af2-6c13bc5d4603\") " Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.474924 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-utilities" (OuterVolumeSpecName: "utilities") pod "a3531622-6a61-49a4-8af2-6c13bc5d4603" (UID: "a3531622-6a61-49a4-8af2-6c13bc5d4603"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.478594 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3531622-6a61-49a4-8af2-6c13bc5d4603-kube-api-access-m8c8r" (OuterVolumeSpecName: "kube-api-access-m8c8r") pod "a3531622-6a61-49a4-8af2-6c13bc5d4603" (UID: "a3531622-6a61-49a4-8af2-6c13bc5d4603"). InnerVolumeSpecName "kube-api-access-m8c8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.514874 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a3531622-6a61-49a4-8af2-6c13bc5d4603" (UID: "a3531622-6a61-49a4-8af2-6c13bc5d4603"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.576094 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.576132 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8c8r\" (UniqueName: \"kubernetes.io/projected/a3531622-6a61-49a4-8af2-6c13bc5d4603-kube-api-access-m8c8r\") on node \"crc\" DevicePath \"\"" Sep 29 19:10:07 crc kubenswrapper[4792]: I0929 19:10:07.576146 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3531622-6a61-49a4-8af2-6c13bc5d4603-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.027678 4792 generic.go:334] "Generic (PLEG): container finished" podID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerID="07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5" exitCode=0 Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.027745 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd494" event={"ID":"a3531622-6a61-49a4-8af2-6c13bc5d4603","Type":"ContainerDied","Data":"07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5"} Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.027773 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd494" event={"ID":"a3531622-6a61-49a4-8af2-6c13bc5d4603","Type":"ContainerDied","Data":"122071fd78ca0383b534cb9ce207094990c3ac9a9c51cd0d48acc980db1beca5"} Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.027792 4792 scope.go:117] "RemoveContainer" containerID="07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5" Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.027941 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nd494" Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.031306 4792 generic.go:334] "Generic (PLEG): container finished" podID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerID="0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae" exitCode=0 Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.031348 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-px2xb" event={"ID":"41bba70b-b6b7-4bac-90cb-0ee5722dac4a","Type":"ContainerDied","Data":"0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae"} Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.049235 4792 scope.go:117] "RemoveContainer" containerID="0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d" Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.079013 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nd494"] Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.082736 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nd494"] Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.085391 4792 scope.go:117] "RemoveContainer" containerID="b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c" Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.100671 4792 scope.go:117] "RemoveContainer" containerID="07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5" Sep 29 19:10:08 crc kubenswrapper[4792]: E0929 19:10:08.101327 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5\": container with ID starting with 07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5 not found: ID does not exist" containerID="07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5" Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.101368 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5"} err="failed to get container status \"07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5\": rpc error: code = NotFound desc = could not find container \"07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5\": container with ID starting with 07ad48e46998a8c66666e5b82732f07b613218fd67b7e16bb67cd4f1e30ba8e5 not found: ID does not exist" Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.101392 4792 scope.go:117] "RemoveContainer" containerID="0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d" Sep 29 19:10:08 crc kubenswrapper[4792]: E0929 19:10:08.102309 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d\": container with ID starting with 0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d not found: ID does not exist" containerID="0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d" Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.102347 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d"} err="failed to get container status 
\"0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d\": rpc error: code = NotFound desc = could not find container \"0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d\": container with ID starting with 0900bf8fcc7b03813915ddd8b7b3df47a0d87e3f4462cc648598c6716130aa0d not found: ID does not exist" Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.102374 4792 scope.go:117] "RemoveContainer" containerID="b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c" Sep 29 19:10:08 crc kubenswrapper[4792]: E0929 19:10:08.102652 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c\": container with ID starting with b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c not found: ID does not exist" containerID="b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c" Sep 29 19:10:08 crc kubenswrapper[4792]: I0929 19:10:08.102675 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c"} err="failed to get container status \"b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c\": rpc error: code = NotFound desc = could not find container \"b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c\": container with ID starting with b9145df8cc565a75420557e3c0e62d1a190b3a4e1dabdbe74a249df150f6171c not found: ID does not exist" Sep 29 19:10:09 crc kubenswrapper[4792]: I0929 19:10:09.023605 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3531622-6a61-49a4-8af2-6c13bc5d4603" path="/var/lib/kubelet/pods/a3531622-6a61-49a4-8af2-6c13bc5d4603/volumes" Sep 29 19:10:09 crc kubenswrapper[4792]: I0929 19:10:09.037236 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-px2xb" event={"ID":"41bba70b-b6b7-4bac-90cb-0ee5722dac4a","Type":"ContainerStarted","Data":"ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf"} Sep 29 19:10:09 crc kubenswrapper[4792]: I0929 19:10:09.054004 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-px2xb" podStartSLOduration=2.588438878 podStartE2EDuration="6.053983025s" podCreationTimestamp="2025-09-29 19:10:03 +0000 UTC" firstStartedPulling="2025-09-29 19:10:05.007105111 +0000 UTC m=+817.000412507" lastFinishedPulling="2025-09-29 19:10:08.472649258 +0000 UTC m=+820.465956654" observedRunningTime="2025-09-29 19:10:09.053200925 +0000 UTC m=+821.046508341" watchObservedRunningTime="2025-09-29 19:10:09.053983025 +0000 UTC m=+821.047290421" Sep 29 19:10:11 crc kubenswrapper[4792]: I0929 19:10:11.960208 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:10:11 crc kubenswrapper[4792]: I0929 19:10:11.960560 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:10:14 crc 
kubenswrapper[4792]: I0929 19:10:14.123190 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:14 crc kubenswrapper[4792]: I0929 19:10:14.124162 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:14 crc kubenswrapper[4792]: I0929 19:10:14.158964 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:15 crc kubenswrapper[4792]: I0929 19:10:15.105919 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:16 crc kubenswrapper[4792]: I0929 19:10:16.588047 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-px2xb"] Sep 29 19:10:16 crc kubenswrapper[4792]: I0929 19:10:16.793693 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5888bf57-l785s" Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.084717 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-px2xb" podUID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerName="registry-server" containerID="cri-o://ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf" gracePeriod=2 Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.406081 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.527382 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-utilities\") pod \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.527561 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-catalog-content\") pod \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.527614 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9cbg9\" (UniqueName: \"kubernetes.io/projected/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-kube-api-access-9cbg9\") pod \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\" (UID: \"41bba70b-b6b7-4bac-90cb-0ee5722dac4a\") " Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.528619 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-utilities" (OuterVolumeSpecName: "utilities") pod "41bba70b-b6b7-4bac-90cb-0ee5722dac4a" (UID: "41bba70b-b6b7-4bac-90cb-0ee5722dac4a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.533256 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-kube-api-access-9cbg9" (OuterVolumeSpecName: "kube-api-access-9cbg9") pod "41bba70b-b6b7-4bac-90cb-0ee5722dac4a" (UID: "41bba70b-b6b7-4bac-90cb-0ee5722dac4a"). 
InnerVolumeSpecName "kube-api-access-9cbg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.571559 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41bba70b-b6b7-4bac-90cb-0ee5722dac4a" (UID: "41bba70b-b6b7-4bac-90cb-0ee5722dac4a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.629334 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.629375 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9cbg9\" (UniqueName: \"kubernetes.io/projected/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-kube-api-access-9cbg9\") on node \"crc\" DevicePath \"\"" Sep 29 19:10:18 crc kubenswrapper[4792]: I0929 19:10:18.629386 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41bba70b-b6b7-4bac-90cb-0ee5722dac4a-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.092031 4792 generic.go:334] "Generic (PLEG): container finished" podID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerID="ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf" exitCode=0 Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.092095 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-px2xb" event={"ID":"41bba70b-b6b7-4bac-90cb-0ee5722dac4a","Type":"ContainerDied","Data":"ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf"} Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.092156 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-px2xb" event={"ID":"41bba70b-b6b7-4bac-90cb-0ee5722dac4a","Type":"ContainerDied","Data":"7fa5538750d50398147edf9df1b8f3702440073a4b37d66de6e6d3cb1091e181"} Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.092176 4792 scope.go:117] "RemoveContainer" containerID="ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf" Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.092110 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-px2xb" Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.113903 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-px2xb"] Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.116625 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-px2xb"] Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.120130 4792 scope.go:117] "RemoveContainer" containerID="0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae" Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.135219 4792 scope.go:117] "RemoveContainer" containerID="daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d" Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.171832 4792 scope.go:117] "RemoveContainer" containerID="ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf" Sep 29 19:10:19 crc kubenswrapper[4792]: E0929 19:10:19.172970 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf\": container with ID starting with ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf not found: ID does not exist" containerID="ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf" Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.173018 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf"} err="failed to get container status \"ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf\": rpc error: code = NotFound desc = could not find container \"ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf\": container with ID starting with ea8b4bc16ef81ffbb4d8ffdaaacd05515188fb4323ecd046332efb0ac7342eaf not found: ID does not exist" Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.173049 4792 scope.go:117] "RemoveContainer" containerID="0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae" Sep 29 19:10:19 crc kubenswrapper[4792]: E0929 19:10:19.173515 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae\": container with ID starting with 0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae not found: ID does not exist" containerID="0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae" Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.173552 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae"} err="failed to get container status \"0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae\": rpc error: code = NotFound desc = could not find container \"0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae\": container with ID starting with 0547ca35853410183abe7de7175af4545170dd616fc8c9e38e11d7e39b96a3ae not found: ID does not exist" Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.173575 4792 scope.go:117] "RemoveContainer" containerID="daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d" Sep 29 19:10:19 crc kubenswrapper[4792]: E0929 19:10:19.174421 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d\": container with ID starting with daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d not found: ID does not exist" containerID="daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d" Sep 29 19:10:19 crc kubenswrapper[4792]: I0929 19:10:19.174454 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d"} err="failed to get container status \"daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d\": rpc error: code = NotFound desc = could not find container \"daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d\": container with ID starting with daf8534cefc80a8eba42ddab0a6c648aac97e7d8987b79f53dc5aec8c592d93d not found: ID does not exist" Sep 29 19:10:21 crc kubenswrapper[4792]: I0929 19:10:21.025243 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" path="/var/lib/kubelet/pods/41bba70b-b6b7-4bac-90cb-0ee5722dac4a/volumes" Sep 29 19:10:36 crc kubenswrapper[4792]: I0929 19:10:36.315275 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-798fbb9bbf-rvlk8" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.031571 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-wrpbj"] Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.031897 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerName="registry-server" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.031912 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerName="registry-server" Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.031927 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerName="extract-content" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.031934 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerName="extract-content" Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.031949 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerName="extract-utilities" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.031957 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerName="extract-utilities" Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.031979 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerName="extract-content" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.031987 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerName="extract-content" Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.031996 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerName="extract-utilities" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.032003 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerName="extract-utilities" Sep 29 
19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.032016 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerName="registry-server" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.032022 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerName="registry-server" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.032173 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3531622-6a61-49a4-8af2-6c13bc5d4603" containerName="registry-server" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.032188 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="41bba70b-b6b7-4bac-90cb-0ee5722dac4a" containerName="registry-server" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.034660 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.042469 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.042920 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-xd96j" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.043086 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.067516 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-fw449"] Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.068260 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-fw449"] Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.068355 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.070500 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.163611 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-8csps"] Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.164680 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.166738 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-frr-sockets\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.166780 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-metrics\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.166808 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-reloader\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.166826 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/698fb3db-6e7c-478d-bfae-70fbfa85e384-frr-startup\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.166910 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-frr-conf\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.166935 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/698fb3db-6e7c-478d-bfae-70fbfa85e384-metrics-certs\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.166956 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xpvv\" (UniqueName: \"kubernetes.io/projected/698fb3db-6e7c-478d-bfae-70fbfa85e384-kube-api-access-5xpvv\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.168545 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.168581 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.168552 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.169039 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-v8mdt" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.185480 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-kjkvm"] Sep 29 
19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.186673 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.190554 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.205052 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-kjkvm"] Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.268836 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-memberlist\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.268922 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-frr-sockets\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.268973 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-metrics\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.269376 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbp7f\" (UniqueName: \"kubernetes.io/projected/e4991702-0228-4b1e-abc9-01d614664746-kube-api-access-mbp7f\") pod \"frr-k8s-webhook-server-5478bdb765-fw449\" (UID: \"e4991702-0228-4b1e-abc9-01d614664746\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.269408 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-reloader\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.269484 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/698fb3db-6e7c-478d-bfae-70fbfa85e384-frr-startup\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.269507 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-frr-conf\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.269684 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/698fb3db-6e7c-478d-bfae-70fbfa85e384-metrics-certs\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.269697 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-frr-sockets\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.269758 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xpvv\" (UniqueName: \"kubernetes.io/projected/698fb3db-6e7c-478d-bfae-70fbfa85e384-kube-api-access-5xpvv\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.269962 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e4991702-0228-4b1e-abc9-01d614664746-cert\") pod \"frr-k8s-webhook-server-5478bdb765-fw449\" (UID: \"e4991702-0228-4b1e-abc9-01d614664746\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.270077 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvtz2\" (UniqueName: \"kubernetes.io/projected/c8d5189a-0868-46dc-881c-077f4d5be810-kube-api-access-kvtz2\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.270188 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/c8d5189a-0868-46dc-881c-077f4d5be810-metallb-excludel2\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.270314 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-metrics-certs\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.270355 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-metrics\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.270506 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/698fb3db-6e7c-478d-bfae-70fbfa85e384-frr-startup\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.270905 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-reloader\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.270913 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/698fb3db-6e7c-478d-bfae-70fbfa85e384-frr-conf\") pod \"frr-k8s-wrpbj\" (UID: 
\"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.277250 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/698fb3db-6e7c-478d-bfae-70fbfa85e384-metrics-certs\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.291965 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xpvv\" (UniqueName: \"kubernetes.io/projected/698fb3db-6e7c-478d-bfae-70fbfa85e384-kube-api-access-5xpvv\") pod \"frr-k8s-wrpbj\" (UID: \"698fb3db-6e7c-478d-bfae-70fbfa85e384\") " pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.362222 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.370922 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e4991702-0228-4b1e-abc9-01d614664746-cert\") pod \"frr-k8s-webhook-server-5478bdb765-fw449\" (UID: \"e4991702-0228-4b1e-abc9-01d614664746\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.370962 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvtz2\" (UniqueName: \"kubernetes.io/projected/c8d5189a-0868-46dc-881c-077f4d5be810-kube-api-access-kvtz2\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.370991 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/af42de4e-dfaa-4178-b742-d4388d56b58a-metrics-certs\") pod \"controller-5d688f5ffc-kjkvm\" (UID: \"af42de4e-dfaa-4178-b742-d4388d56b58a\") " pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.371019 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/c8d5189a-0868-46dc-881c-077f4d5be810-metallb-excludel2\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.371036 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-metrics-certs\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.371064 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fck7h\" (UniqueName: \"kubernetes.io/projected/af42de4e-dfaa-4178-b742-d4388d56b58a-kube-api-access-fck7h\") pod \"controller-5d688f5ffc-kjkvm\" (UID: \"af42de4e-dfaa-4178-b742-d4388d56b58a\") " pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.371080 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-memberlist\") pod 
\"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.371111 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af42de4e-dfaa-4178-b742-d4388d56b58a-cert\") pod \"controller-5d688f5ffc-kjkvm\" (UID: \"af42de4e-dfaa-4178-b742-d4388d56b58a\") " pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.371137 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbp7f\" (UniqueName: \"kubernetes.io/projected/e4991702-0228-4b1e-abc9-01d614664746-kube-api-access-mbp7f\") pod \"frr-k8s-webhook-server-5478bdb765-fw449\" (UID: \"e4991702-0228-4b1e-abc9-01d614664746\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.371477 4792 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.371554 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-memberlist podName:c8d5189a-0868-46dc-881c-077f4d5be810 nodeName:}" failed. No retries permitted until 2025-09-29 19:10:37.87153174 +0000 UTC m=+849.864839256 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-memberlist") pod "speaker-8csps" (UID: "c8d5189a-0868-46dc-881c-077f4d5be810") : secret "metallb-memberlist" not found Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.371477 4792 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.371605 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-metrics-certs podName:c8d5189a-0868-46dc-881c-077f4d5be810 nodeName:}" failed. No retries permitted until 2025-09-29 19:10:37.871588701 +0000 UTC m=+849.864896097 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-metrics-certs") pod "speaker-8csps" (UID: "c8d5189a-0868-46dc-881c-077f4d5be810") : secret "speaker-certs-secret" not found Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.371934 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/c8d5189a-0868-46dc-881c-077f4d5be810-metallb-excludel2\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.374572 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e4991702-0228-4b1e-abc9-01d614664746-cert\") pod \"frr-k8s-webhook-server-5478bdb765-fw449\" (UID: \"e4991702-0228-4b1e-abc9-01d614664746\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.393620 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvtz2\" (UniqueName: \"kubernetes.io/projected/c8d5189a-0868-46dc-881c-077f4d5be810-kube-api-access-kvtz2\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.393708 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbp7f\" (UniqueName: \"kubernetes.io/projected/e4991702-0228-4b1e-abc9-01d614664746-kube-api-access-mbp7f\") pod \"frr-k8s-webhook-server-5478bdb765-fw449\" (UID: \"e4991702-0228-4b1e-abc9-01d614664746\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.395598 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.477055 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/af42de4e-dfaa-4178-b742-d4388d56b58a-metrics-certs\") pod \"controller-5d688f5ffc-kjkvm\" (UID: \"af42de4e-dfaa-4178-b742-d4388d56b58a\") " pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.477365 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fck7h\" (UniqueName: \"kubernetes.io/projected/af42de4e-dfaa-4178-b742-d4388d56b58a-kube-api-access-fck7h\") pod \"controller-5d688f5ffc-kjkvm\" (UID: \"af42de4e-dfaa-4178-b742-d4388d56b58a\") " pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.478718 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af42de4e-dfaa-4178-b742-d4388d56b58a-cert\") pod \"controller-5d688f5ffc-kjkvm\" (UID: \"af42de4e-dfaa-4178-b742-d4388d56b58a\") " pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.480184 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.486138 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/af42de4e-dfaa-4178-b742-d4388d56b58a-metrics-certs\") pod \"controller-5d688f5ffc-kjkvm\" (UID: \"af42de4e-dfaa-4178-b742-d4388d56b58a\") " pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.492276 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af42de4e-dfaa-4178-b742-d4388d56b58a-cert\") pod \"controller-5d688f5ffc-kjkvm\" (UID: \"af42de4e-dfaa-4178-b742-d4388d56b58a\") " pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.497566 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fck7h\" (UniqueName: \"kubernetes.io/projected/af42de4e-dfaa-4178-b742-d4388d56b58a-kube-api-access-fck7h\") pod \"controller-5d688f5ffc-kjkvm\" (UID: \"af42de4e-dfaa-4178-b742-d4388d56b58a\") " pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.501113 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.818958 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-fw449"] Sep 29 19:10:37 crc kubenswrapper[4792]: W0929 19:10:37.826282 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4991702_0228_4b1e_abc9_01d614664746.slice/crio-1096c2dd3f9b9e7e6a4bbd207457b27b9677920f74fafa33aa8bbe41770f11d8 WatchSource:0}: Error finding container 1096c2dd3f9b9e7e6a4bbd207457b27b9677920f74fafa33aa8bbe41770f11d8: Status 404 returned error can't find the container with id 1096c2dd3f9b9e7e6a4bbd207457b27b9677920f74fafa33aa8bbe41770f11d8 Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.884200 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-metrics-certs\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.884251 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-memberlist\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.884350 4792 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 19:10:37 crc kubenswrapper[4792]: E0929 19:10:37.884396 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-memberlist podName:c8d5189a-0868-46dc-881c-077f4d5be810 nodeName:}" failed. No retries permitted until 2025-09-29 19:10:38.884382355 +0000 UTC m=+850.877689751 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-memberlist") pod "speaker-8csps" (UID: "c8d5189a-0868-46dc-881c-077f4d5be810") : secret "metallb-memberlist" not found Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.889020 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-metrics-certs\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:37 crc kubenswrapper[4792]: I0929 19:10:37.892898 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-kjkvm"] Sep 29 19:10:37 crc kubenswrapper[4792]: W0929 19:10:37.899483 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf42de4e_dfaa_4178_b742_d4388d56b58a.slice/crio-c8342f984366e17e1c93ef42928bb6afc36c10bd59baf2ab3a187cd408f5d227 WatchSource:0}: Error finding container c8342f984366e17e1c93ef42928bb6afc36c10bd59baf2ab3a187cd408f5d227: Status 404 returned error can't find the container with id c8342f984366e17e1c93ef42928bb6afc36c10bd59baf2ab3a187cd408f5d227 Sep 29 19:10:38 crc kubenswrapper[4792]: I0929 19:10:38.195552 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" event={"ID":"e4991702-0228-4b1e-abc9-01d614664746","Type":"ContainerStarted","Data":"1096c2dd3f9b9e7e6a4bbd207457b27b9677920f74fafa33aa8bbe41770f11d8"} Sep 29 19:10:38 crc kubenswrapper[4792]: I0929 19:10:38.197338 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-kjkvm" event={"ID":"af42de4e-dfaa-4178-b742-d4388d56b58a","Type":"ContainerStarted","Data":"1ad381f6c998f81763e80da0750e2c353ba87a378a490e2484b5bcb6f9ee5377"} Sep 29 19:10:38 crc kubenswrapper[4792]: I0929 19:10:38.197389 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-kjkvm" event={"ID":"af42de4e-dfaa-4178-b742-d4388d56b58a","Type":"ContainerStarted","Data":"ecf27e95265c9b77134781b8b53fe92438a4dd052ad3d6cd46b3e7c5dd2d7eb2"} Sep 29 19:10:38 crc kubenswrapper[4792]: I0929 19:10:38.197402 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-kjkvm" event={"ID":"af42de4e-dfaa-4178-b742-d4388d56b58a","Type":"ContainerStarted","Data":"c8342f984366e17e1c93ef42928bb6afc36c10bd59baf2ab3a187cd408f5d227"} Sep 29 19:10:38 crc kubenswrapper[4792]: I0929 19:10:38.197445 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:38 crc kubenswrapper[4792]: I0929 19:10:38.198210 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wrpbj" event={"ID":"698fb3db-6e7c-478d-bfae-70fbfa85e384","Type":"ContainerStarted","Data":"53be73104f748b9c4d231d67d179c3346a1308625a9a8be6196c86c326135058"} Sep 29 19:10:38 crc kubenswrapper[4792]: I0929 19:10:38.214457 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-kjkvm" podStartSLOduration=1.2144413809999999 podStartE2EDuration="1.214441381s" podCreationTimestamp="2025-09-29 19:10:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:38.211815341 +0000 
UTC m=+850.205122747" watchObservedRunningTime="2025-09-29 19:10:38.214441381 +0000 UTC m=+850.207748777" Sep 29 19:10:38 crc kubenswrapper[4792]: I0929 19:10:38.898111 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-memberlist\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:38 crc kubenswrapper[4792]: I0929 19:10:38.906441 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/c8d5189a-0868-46dc-881c-077f4d5be810-memberlist\") pod \"speaker-8csps\" (UID: \"c8d5189a-0868-46dc-881c-077f4d5be810\") " pod="metallb-system/speaker-8csps" Sep 29 19:10:38 crc kubenswrapper[4792]: I0929 19:10:38.979479 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-8csps" Sep 29 19:10:39 crc kubenswrapper[4792]: I0929 19:10:39.214654 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8csps" event={"ID":"c8d5189a-0868-46dc-881c-077f4d5be810","Type":"ContainerStarted","Data":"7800214275ac5c77034506e1859db5da9e2de61591f4f526e0a5a573a3537979"} Sep 29 19:10:40 crc kubenswrapper[4792]: I0929 19:10:40.227789 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8csps" event={"ID":"c8d5189a-0868-46dc-881c-077f4d5be810","Type":"ContainerStarted","Data":"df9a61ece7cf6cf5bda04acbcc2223930c4751446d9714ef8d8f584dc6d95f66"} Sep 29 19:10:40 crc kubenswrapper[4792]: I0929 19:10:40.228105 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-8csps" Sep 29 19:10:40 crc kubenswrapper[4792]: I0929 19:10:40.228116 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8csps" event={"ID":"c8d5189a-0868-46dc-881c-077f4d5be810","Type":"ContainerStarted","Data":"fd342613579f0535aa1635296d2ae080870ee8e70a33d9dc194dbc5204f97cf0"} Sep 29 19:10:40 crc kubenswrapper[4792]: I0929 19:10:40.268907 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-8csps" podStartSLOduration=3.268891225 podStartE2EDuration="3.268891225s" podCreationTimestamp="2025-09-29 19:10:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:40.263781769 +0000 UTC m=+852.257089165" watchObservedRunningTime="2025-09-29 19:10:40.268891225 +0000 UTC m=+852.262198621" Sep 29 19:10:41 crc kubenswrapper[4792]: I0929 19:10:41.959326 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:10:41 crc kubenswrapper[4792]: I0929 19:10:41.959668 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:10:41 crc kubenswrapper[4792]: I0929 19:10:41.959733 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:10:41 crc kubenswrapper[4792]: I0929 19:10:41.960423 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"487246f4f6005415a540bc4c228e6bec5b9bf5f447044923f1e106cf7a0cba67"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:10:41 crc kubenswrapper[4792]: I0929 19:10:41.960493 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://487246f4f6005415a540bc4c228e6bec5b9bf5f447044923f1e106cf7a0cba67" gracePeriod=600 Sep 29 19:10:42 crc kubenswrapper[4792]: I0929 19:10:42.249596 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="487246f4f6005415a540bc4c228e6bec5b9bf5f447044923f1e106cf7a0cba67" exitCode=0 Sep 29 19:10:42 crc kubenswrapper[4792]: I0929 19:10:42.249639 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"487246f4f6005415a540bc4c228e6bec5b9bf5f447044923f1e106cf7a0cba67"} Sep 29 19:10:42 crc kubenswrapper[4792]: I0929 19:10:42.249671 4792 scope.go:117] "RemoveContainer" containerID="28e9b336e995bb00d35a92fadb8e3b916142bf2b43240549bb14e32ddcc21015" Sep 29 19:10:43 crc kubenswrapper[4792]: I0929 19:10:43.258646 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"5fe4636f526132681f79866adf93cfab5bd3a4171ad63c289794ff569221d1f4"} Sep 29 19:10:46 crc kubenswrapper[4792]: I0929 19:10:46.274507 4792 generic.go:334] "Generic (PLEG): container finished" podID="698fb3db-6e7c-478d-bfae-70fbfa85e384" containerID="153713cb93ff868c314450b22e93abf4c3beeab187dc0702bda2e6f5ab14c470" exitCode=0 Sep 29 19:10:46 crc kubenswrapper[4792]: I0929 19:10:46.274616 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wrpbj" event={"ID":"698fb3db-6e7c-478d-bfae-70fbfa85e384","Type":"ContainerDied","Data":"153713cb93ff868c314450b22e93abf4c3beeab187dc0702bda2e6f5ab14c470"} Sep 29 19:10:46 crc kubenswrapper[4792]: I0929 19:10:46.277114 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" event={"ID":"e4991702-0228-4b1e-abc9-01d614664746","Type":"ContainerStarted","Data":"accb2039373e8d2c6dff5e11c54e3da2ef98ba918a65e6d56860292fee7d92dc"} Sep 29 19:10:46 crc kubenswrapper[4792]: I0929 19:10:46.277232 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" Sep 29 19:10:47 crc kubenswrapper[4792]: I0929 19:10:47.287120 4792 generic.go:334] "Generic (PLEG): container finished" podID="698fb3db-6e7c-478d-bfae-70fbfa85e384" containerID="476c31f52811461a2b3de45274480d2c1239df9c7480237fd065f597cfa5c544" exitCode=0 Sep 29 19:10:47 crc kubenswrapper[4792]: I0929 19:10:47.287164 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wrpbj" 
event={"ID":"698fb3db-6e7c-478d-bfae-70fbfa85e384","Type":"ContainerDied","Data":"476c31f52811461a2b3de45274480d2c1239df9c7480237fd065f597cfa5c544"} Sep 29 19:10:47 crc kubenswrapper[4792]: I0929 19:10:47.319068 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" podStartSLOduration=2.383836074 podStartE2EDuration="10.319046295s" podCreationTimestamp="2025-09-29 19:10:37 +0000 UTC" firstStartedPulling="2025-09-29 19:10:37.829809545 +0000 UTC m=+849.823116941" lastFinishedPulling="2025-09-29 19:10:45.765019766 +0000 UTC m=+857.758327162" observedRunningTime="2025-09-29 19:10:46.367268869 +0000 UTC m=+858.360576265" watchObservedRunningTime="2025-09-29 19:10:47.319046295 +0000 UTC m=+859.312353691" Sep 29 19:10:48 crc kubenswrapper[4792]: I0929 19:10:48.295254 4792 generic.go:334] "Generic (PLEG): container finished" podID="698fb3db-6e7c-478d-bfae-70fbfa85e384" containerID="4b38a2438aea62a7986bebb4b89ccb7c2364fe1152e99f902e205f20a5b81d5a" exitCode=0 Sep 29 19:10:48 crc kubenswrapper[4792]: I0929 19:10:48.295313 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wrpbj" event={"ID":"698fb3db-6e7c-478d-bfae-70fbfa85e384","Type":"ContainerDied","Data":"4b38a2438aea62a7986bebb4b89ccb7c2364fe1152e99f902e205f20a5b81d5a"} Sep 29 19:10:49 crc kubenswrapper[4792]: I0929 19:10:49.312587 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wrpbj" event={"ID":"698fb3db-6e7c-478d-bfae-70fbfa85e384","Type":"ContainerStarted","Data":"d150a0379d5f7ead737a356f496af939e8061fc37bd7b3a4f57d5a55ee816a01"} Sep 29 19:10:49 crc kubenswrapper[4792]: I0929 19:10:49.312963 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:49 crc kubenswrapper[4792]: I0929 19:10:49.312978 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wrpbj" event={"ID":"698fb3db-6e7c-478d-bfae-70fbfa85e384","Type":"ContainerStarted","Data":"61410968c0ebf156a598ef8b5d21e229518cb4fc545cb2e8f2418fecb6aff85f"} Sep 29 19:10:49 crc kubenswrapper[4792]: I0929 19:10:49.312990 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wrpbj" event={"ID":"698fb3db-6e7c-478d-bfae-70fbfa85e384","Type":"ContainerStarted","Data":"cfad224da723f9d01a434cf7b776d126c2464167b50f818ff832ba9aca5c72ee"} Sep 29 19:10:49 crc kubenswrapper[4792]: I0929 19:10:49.313005 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wrpbj" event={"ID":"698fb3db-6e7c-478d-bfae-70fbfa85e384","Type":"ContainerStarted","Data":"8b55ec1a8d74e71a586a7d50cc9b459412ce8f371a603440a026cd82a54479f5"} Sep 29 19:10:49 crc kubenswrapper[4792]: I0929 19:10:49.313020 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wrpbj" event={"ID":"698fb3db-6e7c-478d-bfae-70fbfa85e384","Type":"ContainerStarted","Data":"6acd17c163fd66518c619451d74c4933ee4b1886c7d92ff040c37f6bdb8e9269"} Sep 29 19:10:49 crc kubenswrapper[4792]: I0929 19:10:49.313034 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wrpbj" event={"ID":"698fb3db-6e7c-478d-bfae-70fbfa85e384","Type":"ContainerStarted","Data":"0d7dd52c8dd82a0b8939284aca514fd3a0c023a3784fb19bbc1419cb1c4a221d"} Sep 29 19:10:52 crc kubenswrapper[4792]: I0929 19:10:52.363101 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:52 crc 
kubenswrapper[4792]: I0929 19:10:52.400438 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:10:52 crc kubenswrapper[4792]: I0929 19:10:52.424234 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-wrpbj" podStartSLOduration=8.184316438 podStartE2EDuration="16.424215872s" podCreationTimestamp="2025-09-29 19:10:36 +0000 UTC" firstStartedPulling="2025-09-29 19:10:37.549271245 +0000 UTC m=+849.542578641" lastFinishedPulling="2025-09-29 19:10:45.789170689 +0000 UTC m=+857.782478075" observedRunningTime="2025-09-29 19:10:49.346237225 +0000 UTC m=+861.339544661" watchObservedRunningTime="2025-09-29 19:10:52.424215872 +0000 UTC m=+864.417523278" Sep 29 19:10:57 crc kubenswrapper[4792]: I0929 19:10:57.399802 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-fw449" Sep 29 19:10:57 crc kubenswrapper[4792]: I0929 19:10:57.505014 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-kjkvm" Sep 29 19:10:58 crc kubenswrapper[4792]: I0929 19:10:58.984645 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-8csps" Sep 29 19:11:01 crc kubenswrapper[4792]: I0929 19:11:01.965199 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-txm8n"] Sep 29 19:11:01 crc kubenswrapper[4792]: I0929 19:11:01.966089 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-txm8n" Sep 29 19:11:01 crc kubenswrapper[4792]: I0929 19:11:01.970375 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 29 19:11:01 crc kubenswrapper[4792]: I0929 19:11:01.970380 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-rjln2" Sep 29 19:11:01 crc kubenswrapper[4792]: I0929 19:11:01.970737 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 29 19:11:02 crc kubenswrapper[4792]: I0929 19:11:02.018506 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x9jh\" (UniqueName: \"kubernetes.io/projected/65a5821f-c139-40df-84bf-0bdefe91b013-kube-api-access-2x9jh\") pod \"openstack-operator-index-txm8n\" (UID: \"65a5821f-c139-40df-84bf-0bdefe91b013\") " pod="openstack-operators/openstack-operator-index-txm8n" Sep 29 19:11:02 crc kubenswrapper[4792]: I0929 19:11:02.034366 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-txm8n"] Sep 29 19:11:02 crc kubenswrapper[4792]: I0929 19:11:02.119210 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x9jh\" (UniqueName: \"kubernetes.io/projected/65a5821f-c139-40df-84bf-0bdefe91b013-kube-api-access-2x9jh\") pod \"openstack-operator-index-txm8n\" (UID: \"65a5821f-c139-40df-84bf-0bdefe91b013\") " pod="openstack-operators/openstack-operator-index-txm8n" Sep 29 19:11:02 crc kubenswrapper[4792]: I0929 19:11:02.138948 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x9jh\" (UniqueName: \"kubernetes.io/projected/65a5821f-c139-40df-84bf-0bdefe91b013-kube-api-access-2x9jh\") pod 
\"openstack-operator-index-txm8n\" (UID: \"65a5821f-c139-40df-84bf-0bdefe91b013\") " pod="openstack-operators/openstack-operator-index-txm8n" Sep 29 19:11:02 crc kubenswrapper[4792]: I0929 19:11:02.281627 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-txm8n" Sep 29 19:11:02 crc kubenswrapper[4792]: I0929 19:11:02.691980 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-txm8n"] Sep 29 19:11:03 crc kubenswrapper[4792]: I0929 19:11:03.392065 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-txm8n" event={"ID":"65a5821f-c139-40df-84bf-0bdefe91b013","Type":"ContainerStarted","Data":"74a6ab6d43b128b7a8dda323477b872a514e8cfd1c391f277aeb1086b914b2e9"} Sep 29 19:11:05 crc kubenswrapper[4792]: I0929 19:11:05.140670 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-txm8n"] Sep 29 19:11:05 crc kubenswrapper[4792]: I0929 19:11:05.403867 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-txm8n" event={"ID":"65a5821f-c139-40df-84bf-0bdefe91b013","Type":"ContainerStarted","Data":"a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9"} Sep 29 19:11:05 crc kubenswrapper[4792]: I0929 19:11:05.744974 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-txm8n" podStartSLOduration=2.561967654 podStartE2EDuration="4.744951805s" podCreationTimestamp="2025-09-29 19:11:01 +0000 UTC" firstStartedPulling="2025-09-29 19:11:02.695298901 +0000 UTC m=+874.688606297" lastFinishedPulling="2025-09-29 19:11:04.878283052 +0000 UTC m=+876.871590448" observedRunningTime="2025-09-29 19:11:05.420963361 +0000 UTC m=+877.414270767" watchObservedRunningTime="2025-09-29 19:11:05.744951805 +0000 UTC m=+877.738259201" Sep 29 19:11:05 crc kubenswrapper[4792]: I0929 19:11:05.746536 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-9z9w7"] Sep 29 19:11:05 crc kubenswrapper[4792]: I0929 19:11:05.747427 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-9z9w7" Sep 29 19:11:05 crc kubenswrapper[4792]: I0929 19:11:05.762591 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-9z9w7"] Sep 29 19:11:05 crc kubenswrapper[4792]: I0929 19:11:05.864490 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxlhz\" (UniqueName: \"kubernetes.io/projected/b589f424-730a-4e38-8dfd-c1229f055e2a-kube-api-access-sxlhz\") pod \"openstack-operator-index-9z9w7\" (UID: \"b589f424-730a-4e38-8dfd-c1229f055e2a\") " pod="openstack-operators/openstack-operator-index-9z9w7" Sep 29 19:11:05 crc kubenswrapper[4792]: I0929 19:11:05.965548 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxlhz\" (UniqueName: \"kubernetes.io/projected/b589f424-730a-4e38-8dfd-c1229f055e2a-kube-api-access-sxlhz\") pod \"openstack-operator-index-9z9w7\" (UID: \"b589f424-730a-4e38-8dfd-c1229f055e2a\") " pod="openstack-operators/openstack-operator-index-9z9w7" Sep 29 19:11:05 crc kubenswrapper[4792]: I0929 19:11:05.985688 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxlhz\" (UniqueName: \"kubernetes.io/projected/b589f424-730a-4e38-8dfd-c1229f055e2a-kube-api-access-sxlhz\") pod \"openstack-operator-index-9z9w7\" (UID: \"b589f424-730a-4e38-8dfd-c1229f055e2a\") " pod="openstack-operators/openstack-operator-index-9z9w7" Sep 29 19:11:06 crc kubenswrapper[4792]: I0929 19:11:06.066001 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-9z9w7" Sep 29 19:11:06 crc kubenswrapper[4792]: I0929 19:11:06.409885 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-txm8n" podUID="65a5821f-c139-40df-84bf-0bdefe91b013" containerName="registry-server" containerID="cri-o://a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9" gracePeriod=2 Sep 29 19:11:06 crc kubenswrapper[4792]: I0929 19:11:06.478667 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-9z9w7"] Sep 29 19:11:06 crc kubenswrapper[4792]: I0929 19:11:06.710213 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-txm8n" Sep 29 19:11:06 crc kubenswrapper[4792]: I0929 19:11:06.875608 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x9jh\" (UniqueName: \"kubernetes.io/projected/65a5821f-c139-40df-84bf-0bdefe91b013-kube-api-access-2x9jh\") pod \"65a5821f-c139-40df-84bf-0bdefe91b013\" (UID: \"65a5821f-c139-40df-84bf-0bdefe91b013\") " Sep 29 19:11:06 crc kubenswrapper[4792]: I0929 19:11:06.880922 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65a5821f-c139-40df-84bf-0bdefe91b013-kube-api-access-2x9jh" (OuterVolumeSpecName: "kube-api-access-2x9jh") pod "65a5821f-c139-40df-84bf-0bdefe91b013" (UID: "65a5821f-c139-40df-84bf-0bdefe91b013"). InnerVolumeSpecName "kube-api-access-2x9jh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:11:06 crc kubenswrapper[4792]: I0929 19:11:06.977982 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x9jh\" (UniqueName: \"kubernetes.io/projected/65a5821f-c139-40df-84bf-0bdefe91b013-kube-api-access-2x9jh\") on node \"crc\" DevicePath \"\"" Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.369185 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-wrpbj" Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.422776 4792 generic.go:334] "Generic (PLEG): container finished" podID="65a5821f-c139-40df-84bf-0bdefe91b013" containerID="a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9" exitCode=0 Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.422842 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-txm8n" event={"ID":"65a5821f-c139-40df-84bf-0bdefe91b013","Type":"ContainerDied","Data":"a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9"} Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.422907 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-txm8n" event={"ID":"65a5821f-c139-40df-84bf-0bdefe91b013","Type":"ContainerDied","Data":"74a6ab6d43b128b7a8dda323477b872a514e8cfd1c391f277aeb1086b914b2e9"} Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.422928 4792 scope.go:117] "RemoveContainer" containerID="a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9" Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.423029 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-txm8n" Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.425931 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-9z9w7" event={"ID":"b589f424-730a-4e38-8dfd-c1229f055e2a","Type":"ContainerStarted","Data":"e54d36061c407b15869f065b59acf7b7024c47c72261ba78bae31671ecbf65ff"} Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.425954 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-9z9w7" event={"ID":"b589f424-730a-4e38-8dfd-c1229f055e2a","Type":"ContainerStarted","Data":"1bbc7d87498f45535401bfa6dbed5df9f2e59514328217c5bf87d9cf50739735"} Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.446376 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-9z9w7" podStartSLOduration=2.381671003 podStartE2EDuration="2.446358753s" podCreationTimestamp="2025-09-29 19:11:05 +0000 UTC" firstStartedPulling="2025-09-29 19:11:06.491959407 +0000 UTC m=+878.485266803" lastFinishedPulling="2025-09-29 19:11:06.556647157 +0000 UTC m=+878.549954553" observedRunningTime="2025-09-29 19:11:07.444186195 +0000 UTC m=+879.437493601" watchObservedRunningTime="2025-09-29 19:11:07.446358753 +0000 UTC m=+879.439666159" Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.447422 4792 scope.go:117] "RemoveContainer" containerID="a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9" Sep 29 19:11:07 crc kubenswrapper[4792]: E0929 19:11:07.448042 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9\": container with ID starting with 
a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9 not found: ID does not exist" containerID="a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9" Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.448129 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9"} err="failed to get container status \"a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9\": rpc error: code = NotFound desc = could not find container \"a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9\": container with ID starting with a56ae038a40e35b576ce6563dbe88e88092fd043f5b30fc448e7cf3056fd1bc9 not found: ID does not exist" Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.463033 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-txm8n"] Sep 29 19:11:07 crc kubenswrapper[4792]: I0929 19:11:07.466917 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-txm8n"] Sep 29 19:11:09 crc kubenswrapper[4792]: I0929 19:11:09.027088 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65a5821f-c139-40df-84bf-0bdefe91b013" path="/var/lib/kubelet/pods/65a5821f-c139-40df-84bf-0bdefe91b013/volumes" Sep 29 19:11:16 crc kubenswrapper[4792]: I0929 19:11:16.066605 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-9z9w7" Sep 29 19:11:16 crc kubenswrapper[4792]: I0929 19:11:16.067445 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-9z9w7" Sep 29 19:11:16 crc kubenswrapper[4792]: I0929 19:11:16.098582 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-9z9w7" Sep 29 19:11:16 crc kubenswrapper[4792]: I0929 19:11:16.506999 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-9z9w7" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.036131 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr"] Sep 29 19:11:22 crc kubenswrapper[4792]: E0929 19:11:22.038258 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a5821f-c139-40df-84bf-0bdefe91b013" containerName="registry-server" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.038724 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a5821f-c139-40df-84bf-0bdefe91b013" containerName="registry-server" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.039081 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="65a5821f-c139-40df-84bf-0bdefe91b013" containerName="registry-server" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.040745 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.044353 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-vntvp" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.049309 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr"] Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.088273 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-bundle\") pod \"2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.088360 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b67j\" (UniqueName: \"kubernetes.io/projected/0c02843e-3b00-4af6-8f78-d46dc77f427d-kube-api-access-2b67j\") pod \"2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.088522 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-util\") pod \"2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.189487 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-bundle\") pod \"2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.189574 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b67j\" (UniqueName: \"kubernetes.io/projected/0c02843e-3b00-4af6-8f78-d46dc77f427d-kube-api-access-2b67j\") pod \"2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.189638 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-util\") pod \"2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.190171 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-bundle\") pod \"2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.190232 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-util\") pod \"2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.212250 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b67j\" (UniqueName: \"kubernetes.io/projected/0c02843e-3b00-4af6-8f78-d46dc77f427d-kube-api-access-2b67j\") pod \"2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.371197 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:22 crc kubenswrapper[4792]: I0929 19:11:22.785686 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr"] Sep 29 19:11:23 crc kubenswrapper[4792]: I0929 19:11:23.532080 4792 generic.go:334] "Generic (PLEG): container finished" podID="0c02843e-3b00-4af6-8f78-d46dc77f427d" containerID="b38f115538a8d9355eb19a3804e4cf6c1ac3f77180cb522d7c91ec77c1ac2e7c" exitCode=0 Sep 29 19:11:23 crc kubenswrapper[4792]: I0929 19:11:23.532169 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" event={"ID":"0c02843e-3b00-4af6-8f78-d46dc77f427d","Type":"ContainerDied","Data":"b38f115538a8d9355eb19a3804e4cf6c1ac3f77180cb522d7c91ec77c1ac2e7c"} Sep 29 19:11:23 crc kubenswrapper[4792]: I0929 19:11:23.532366 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" event={"ID":"0c02843e-3b00-4af6-8f78-d46dc77f427d","Type":"ContainerStarted","Data":"37336f397c08706adc936466e0d6280e50351af8ee1b7369f9717e843ca5bd3f"} Sep 29 19:11:24 crc kubenswrapper[4792]: I0929 19:11:24.538753 4792 generic.go:334] "Generic (PLEG): container finished" podID="0c02843e-3b00-4af6-8f78-d46dc77f427d" containerID="2de065fc4f895ef2e149e7b8cc6e5dd7d42b1eeda315dd0a2da102fe6accb05f" exitCode=0 Sep 29 19:11:24 crc kubenswrapper[4792]: I0929 19:11:24.538805 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" event={"ID":"0c02843e-3b00-4af6-8f78-d46dc77f427d","Type":"ContainerDied","Data":"2de065fc4f895ef2e149e7b8cc6e5dd7d42b1eeda315dd0a2da102fe6accb05f"} Sep 29 19:11:25 crc kubenswrapper[4792]: I0929 19:11:25.545313 4792 generic.go:334] "Generic (PLEG): container finished" podID="0c02843e-3b00-4af6-8f78-d46dc77f427d" containerID="e2b9aaff2b73d498f82368a284d1152365d5941e19c918def8173e771f3896a4" exitCode=0 Sep 29 19:11:25 crc kubenswrapper[4792]: I0929 19:11:25.545357 4792 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" event={"ID":"0c02843e-3b00-4af6-8f78-d46dc77f427d","Type":"ContainerDied","Data":"e2b9aaff2b73d498f82368a284d1152365d5941e19c918def8173e771f3896a4"} Sep 29 19:11:26 crc kubenswrapper[4792]: I0929 19:11:26.799513 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:26 crc kubenswrapper[4792]: I0929 19:11:26.939312 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-bundle\") pod \"0c02843e-3b00-4af6-8f78-d46dc77f427d\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " Sep 29 19:11:26 crc kubenswrapper[4792]: I0929 19:11:26.939480 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-util\") pod \"0c02843e-3b00-4af6-8f78-d46dc77f427d\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " Sep 29 19:11:26 crc kubenswrapper[4792]: I0929 19:11:26.939562 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b67j\" (UniqueName: \"kubernetes.io/projected/0c02843e-3b00-4af6-8f78-d46dc77f427d-kube-api-access-2b67j\") pod \"0c02843e-3b00-4af6-8f78-d46dc77f427d\" (UID: \"0c02843e-3b00-4af6-8f78-d46dc77f427d\") " Sep 29 19:11:26 crc kubenswrapper[4792]: I0929 19:11:26.941379 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-bundle" (OuterVolumeSpecName: "bundle") pod "0c02843e-3b00-4af6-8f78-d46dc77f427d" (UID: "0c02843e-3b00-4af6-8f78-d46dc77f427d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:11:26 crc kubenswrapper[4792]: I0929 19:11:26.946128 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c02843e-3b00-4af6-8f78-d46dc77f427d-kube-api-access-2b67j" (OuterVolumeSpecName: "kube-api-access-2b67j") pod "0c02843e-3b00-4af6-8f78-d46dc77f427d" (UID: "0c02843e-3b00-4af6-8f78-d46dc77f427d"). InnerVolumeSpecName "kube-api-access-2b67j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:11:26 crc kubenswrapper[4792]: I0929 19:11:26.954614 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-util" (OuterVolumeSpecName: "util") pod "0c02843e-3b00-4af6-8f78-d46dc77f427d" (UID: "0c02843e-3b00-4af6-8f78-d46dc77f427d"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:11:27 crc kubenswrapper[4792]: I0929 19:11:27.041277 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-util\") on node \"crc\" DevicePath \"\"" Sep 29 19:11:27 crc kubenswrapper[4792]: I0929 19:11:27.041316 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b67j\" (UniqueName: \"kubernetes.io/projected/0c02843e-3b00-4af6-8f78-d46dc77f427d-kube-api-access-2b67j\") on node \"crc\" DevicePath \"\"" Sep 29 19:11:27 crc kubenswrapper[4792]: I0929 19:11:27.041327 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0c02843e-3b00-4af6-8f78-d46dc77f427d-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:11:27 crc kubenswrapper[4792]: I0929 19:11:27.558865 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" event={"ID":"0c02843e-3b00-4af6-8f78-d46dc77f427d","Type":"ContainerDied","Data":"37336f397c08706adc936466e0d6280e50351af8ee1b7369f9717e843ca5bd3f"} Sep 29 19:11:27 crc kubenswrapper[4792]: I0929 19:11:27.558902 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37336f397c08706adc936466e0d6280e50351af8ee1b7369f9717e843ca5bd3f" Sep 29 19:11:27 crc kubenswrapper[4792]: I0929 19:11:27.558929 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.308202 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4"] Sep 29 19:11:35 crc kubenswrapper[4792]: E0929 19:11:35.309137 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c02843e-3b00-4af6-8f78-d46dc77f427d" containerName="extract" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.309152 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c02843e-3b00-4af6-8f78-d46dc77f427d" containerName="extract" Sep 29 19:11:35 crc kubenswrapper[4792]: E0929 19:11:35.309165 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c02843e-3b00-4af6-8f78-d46dc77f427d" containerName="util" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.309172 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c02843e-3b00-4af6-8f78-d46dc77f427d" containerName="util" Sep 29 19:11:35 crc kubenswrapper[4792]: E0929 19:11:35.309187 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c02843e-3b00-4af6-8f78-d46dc77f427d" containerName="pull" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.309194 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c02843e-3b00-4af6-8f78-d46dc77f427d" containerName="pull" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.309328 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c02843e-3b00-4af6-8f78-d46dc77f427d" containerName="extract" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.310125 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.314731 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-2r85b" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.354184 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4"] Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.444705 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-276w9\" (UniqueName: \"kubernetes.io/projected/28b981e4-ec59-452c-950d-2b86f346df10-kube-api-access-276w9\") pod \"openstack-operator-controller-operator-7f7c575847-m64w4\" (UID: \"28b981e4-ec59-452c-950d-2b86f346df10\") " pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.545521 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-276w9\" (UniqueName: \"kubernetes.io/projected/28b981e4-ec59-452c-950d-2b86f346df10-kube-api-access-276w9\") pod \"openstack-operator-controller-operator-7f7c575847-m64w4\" (UID: \"28b981e4-ec59-452c-950d-2b86f346df10\") " pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.563234 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-276w9\" (UniqueName: \"kubernetes.io/projected/28b981e4-ec59-452c-950d-2b86f346df10-kube-api-access-276w9\") pod \"openstack-operator-controller-operator-7f7c575847-m64w4\" (UID: \"28b981e4-ec59-452c-950d-2b86f346df10\") " pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" Sep 29 19:11:35 crc kubenswrapper[4792]: I0929 19:11:35.631525 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" Sep 29 19:11:36 crc kubenswrapper[4792]: I0929 19:11:36.106956 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4"] Sep 29 19:11:36 crc kubenswrapper[4792]: I0929 19:11:36.614653 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" event={"ID":"28b981e4-ec59-452c-950d-2b86f346df10","Type":"ContainerStarted","Data":"099abac4f0f81220397cd86425e5bca8f34d89fed25eaaa6b7ccc628b3b31f39"} Sep 29 19:11:41 crc kubenswrapper[4792]: I0929 19:11:41.646330 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" event={"ID":"28b981e4-ec59-452c-950d-2b86f346df10","Type":"ContainerStarted","Data":"5d8ba47d14f1573fb47ecfdd788b4818fb1eb5a3d59586067aff5d648b487232"} Sep 29 19:11:43 crc kubenswrapper[4792]: I0929 19:11:43.659905 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" event={"ID":"28b981e4-ec59-452c-950d-2b86f346df10","Type":"ContainerStarted","Data":"40c1d5bd08cdfc6b7c6bf3f9cd4a9d373064f272f01a98689118ee320ff0cb81"} Sep 29 19:11:43 crc kubenswrapper[4792]: I0929 19:11:43.660253 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" Sep 29 19:11:43 crc kubenswrapper[4792]: I0929 19:11:43.708314 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" podStartSLOduration=1.405778199 podStartE2EDuration="8.708292929s" podCreationTimestamp="2025-09-29 19:11:35 +0000 UTC" firstStartedPulling="2025-09-29 19:11:36.121575632 +0000 UTC m=+908.114883028" lastFinishedPulling="2025-09-29 19:11:43.424090362 +0000 UTC m=+915.417397758" observedRunningTime="2025-09-29 19:11:43.704281442 +0000 UTC m=+915.697588858" watchObservedRunningTime="2025-09-29 19:11:43.708292929 +0000 UTC m=+915.701600325" Sep 29 19:11:55 crc kubenswrapper[4792]: I0929 19:11:55.634316 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7f7c575847-m64w4" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.054558 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.056274 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.058476 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-wnl64" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.058937 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.060116 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.062973 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-dfnmh" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.073860 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.108810 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.110243 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.113179 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-jzgv2" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.113913 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xv9b5\" (UniqueName: \"kubernetes.io/projected/ee957b59-f5b6-4306-b6a7-4550199fe910-kube-api-access-xv9b5\") pod \"barbican-operator-controller-manager-6ff8b75857-c6z65\" (UID: \"ee957b59-f5b6-4306-b6a7-4550199fe910\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.113970 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp7pb\" (UniqueName: \"kubernetes.io/projected/7f29d397-4b2d-4668-91f6-744e22070f30-kube-api-access-rp7pb\") pod \"cinder-operator-controller-manager-644bddb6d8-vjkgm\" (UID: \"7f29d397-4b2d-4668-91f6-744e22070f30\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.114023 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6j7r\" (UniqueName: \"kubernetes.io/projected/aa102219-aaa4-46c5-b783-519972688523-kube-api-access-b6j7r\") pod \"designate-operator-controller-manager-84f4f7b77b-h9csw\" (UID: \"aa102219-aaa4-46c5-b783-519972688523\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.126441 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.132462 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.133381 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.138521 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.138724 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-hhrf4" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.159165 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.194140 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.198413 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.202577 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-9qvd4" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.208981 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.215014 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4p7v\" (UniqueName: \"kubernetes.io/projected/bd8fdc17-d2f2-4644-8789-c8188f91ce61-kube-api-access-g4p7v\") pod \"glance-operator-controller-manager-84958c4d49-j2crr\" (UID: \"bd8fdc17-d2f2-4644-8789-c8188f91ce61\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.215060 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdssk\" (UniqueName: \"kubernetes.io/projected/1c191b6e-d1aa-4576-98da-db7178aed835-kube-api-access-mdssk\") pod \"heat-operator-controller-manager-5d889d78cf-fcg79\" (UID: \"1c191b6e-d1aa-4576-98da-db7178aed835\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.215112 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6j7r\" (UniqueName: \"kubernetes.io/projected/aa102219-aaa4-46c5-b783-519972688523-kube-api-access-b6j7r\") pod \"designate-operator-controller-manager-84f4f7b77b-h9csw\" (UID: \"aa102219-aaa4-46c5-b783-519972688523\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.215204 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xv9b5\" (UniqueName: \"kubernetes.io/projected/ee957b59-f5b6-4306-b6a7-4550199fe910-kube-api-access-xv9b5\") pod \"barbican-operator-controller-manager-6ff8b75857-c6z65\" (UID: \"ee957b59-f5b6-4306-b6a7-4550199fe910\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.215242 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-rp7pb\" (UniqueName: \"kubernetes.io/projected/7f29d397-4b2d-4668-91f6-744e22070f30-kube-api-access-rp7pb\") pod \"cinder-operator-controller-manager-644bddb6d8-vjkgm\" (UID: \"7f29d397-4b2d-4668-91f6-744e22070f30\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.246301 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.247376 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.263524 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xv9b5\" (UniqueName: \"kubernetes.io/projected/ee957b59-f5b6-4306-b6a7-4550199fe910-kube-api-access-xv9b5\") pod \"barbican-operator-controller-manager-6ff8b75857-c6z65\" (UID: \"ee957b59-f5b6-4306-b6a7-4550199fe910\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.273230 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-lj2fn" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.274168 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6j7r\" (UniqueName: \"kubernetes.io/projected/aa102219-aaa4-46c5-b783-519972688523-kube-api-access-b6j7r\") pod \"designate-operator-controller-manager-84f4f7b77b-h9csw\" (UID: \"aa102219-aaa4-46c5-b783-519972688523\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.276344 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.284151 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.286365 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp7pb\" (UniqueName: \"kubernetes.io/projected/7f29d397-4b2d-4668-91f6-744e22070f30-kube-api-access-rp7pb\") pod \"cinder-operator-controller-manager-644bddb6d8-vjkgm\" (UID: \"7f29d397-4b2d-4668-91f6-744e22070f30\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.289548 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.289813 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-6d4lg" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.292715 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.310940 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.316316 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4p7v\" (UniqueName: \"kubernetes.io/projected/bd8fdc17-d2f2-4644-8789-c8188f91ce61-kube-api-access-g4p7v\") pod \"glance-operator-controller-manager-84958c4d49-j2crr\" (UID: \"bd8fdc17-d2f2-4644-8789-c8188f91ce61\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.316351 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdssk\" (UniqueName: \"kubernetes.io/projected/1c191b6e-d1aa-4576-98da-db7178aed835-kube-api-access-mdssk\") pod \"heat-operator-controller-manager-5d889d78cf-fcg79\" (UID: \"1c191b6e-d1aa-4576-98da-db7178aed835\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.321933 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.334321 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.343919 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.349444 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-rsrrf" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.373893 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdssk\" (UniqueName: \"kubernetes.io/projected/1c191b6e-d1aa-4576-98da-db7178aed835-kube-api-access-mdssk\") pod \"heat-operator-controller-manager-5d889d78cf-fcg79\" (UID: \"1c191b6e-d1aa-4576-98da-db7178aed835\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.387087 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.391088 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.403962 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4p7v\" (UniqueName: \"kubernetes.io/projected/bd8fdc17-d2f2-4644-8789-c8188f91ce61-kube-api-access-g4p7v\") pod \"glance-operator-controller-manager-84958c4d49-j2crr\" (UID: \"bd8fdc17-d2f2-4644-8789-c8188f91ce61\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.411297 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.418245 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.419169 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-nqgt4" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.420893 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca7e36bc-4aa5-414f-92a4-db59399217b9-cert\") pod \"infra-operator-controller-manager-7d857cc749-fn8fk\" (UID: \"ca7e36bc-4aa5-414f-92a4-db59399217b9\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.420926 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5hzm\" (UniqueName: \"kubernetes.io/projected/b9148442-b4dc-4926-920d-33c9a00172fa-kube-api-access-n5hzm\") pod \"keystone-operator-controller-manager-5bd55b4bff-swj98\" (UID: \"b9148442-b4dc-4926-920d-33c9a00172fa\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.420960 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qwq5\" (UniqueName: \"kubernetes.io/projected/1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e-kube-api-access-5qwq5\") pod \"horizon-operator-controller-manager-9f4696d94-d48vc\" (UID: \"1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.420988 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gprll\" (UniqueName: \"kubernetes.io/projected/fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0-kube-api-access-gprll\") pod \"ironic-operator-controller-manager-7975b88857-nfk4r\" (UID: \"fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.421036 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4cvx\" (UniqueName: \"kubernetes.io/projected/ca7e36bc-4aa5-414f-92a4-db59399217b9-kube-api-access-j4cvx\") pod \"infra-operator-controller-manager-7d857cc749-fn8fk\" (UID: \"ca7e36bc-4aa5-414f-92a4-db59399217b9\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.434675 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.446055 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.447774 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.463120 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.474242 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-t4srt"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.474431 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-62ngr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.475189 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.475263 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.483029 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-j5zjj" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.492757 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.502305 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.503418 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.505896 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.506279 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-2c264" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.515422 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-t4srt"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.523698 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4cvx\" (UniqueName: \"kubernetes.io/projected/ca7e36bc-4aa5-414f-92a4-db59399217b9-kube-api-access-j4cvx\") pod \"infra-operator-controller-manager-7d857cc749-fn8fk\" (UID: \"ca7e36bc-4aa5-414f-92a4-db59399217b9\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.523938 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca7e36bc-4aa5-414f-92a4-db59399217b9-cert\") pod \"infra-operator-controller-manager-7d857cc749-fn8fk\" (UID: \"ca7e36bc-4aa5-414f-92a4-db59399217b9\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.524052 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5hzm\" (UniqueName: \"kubernetes.io/projected/b9148442-b4dc-4926-920d-33c9a00172fa-kube-api-access-n5hzm\") pod \"keystone-operator-controller-manager-5bd55b4bff-swj98\" (UID: 
\"b9148442-b4dc-4926-920d-33c9a00172fa\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.524177 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qwq5\" (UniqueName: \"kubernetes.io/projected/1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e-kube-api-access-5qwq5\") pod \"horizon-operator-controller-manager-9f4696d94-d48vc\" (UID: \"1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.524305 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gprll\" (UniqueName: \"kubernetes.io/projected/fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0-kube-api-access-gprll\") pod \"ironic-operator-controller-manager-7975b88857-nfk4r\" (UID: \"fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" Sep 29 19:12:13 crc kubenswrapper[4792]: E0929 19:12:13.525268 4792 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 19:12:13 crc kubenswrapper[4792]: E0929 19:12:13.525399 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ca7e36bc-4aa5-414f-92a4-db59399217b9-cert podName:ca7e36bc-4aa5-414f-92a4-db59399217b9 nodeName:}" failed. No retries permitted until 2025-09-29 19:12:14.025381588 +0000 UTC m=+946.018688984 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ca7e36bc-4aa5-414f-92a4-db59399217b9-cert") pod "infra-operator-controller-manager-7d857cc749-fn8fk" (UID: "ca7e36bc-4aa5-414f-92a4-db59399217b9") : secret "infra-operator-webhook-server-cert" not found Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.526434 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.529593 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.531167 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.547883 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.549298 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.549897 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-rcrs7" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.556512 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.576743 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5hzm\" (UniqueName: \"kubernetes.io/projected/b9148442-b4dc-4926-920d-33c9a00172fa-kube-api-access-n5hzm\") pod \"keystone-operator-controller-manager-5bd55b4bff-swj98\" (UID: \"b9148442-b4dc-4926-920d-33c9a00172fa\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.556831 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-wn7nd" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.582115 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.588372 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gprll\" (UniqueName: \"kubernetes.io/projected/fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0-kube-api-access-gprll\") pod \"ironic-operator-controller-manager-7975b88857-nfk4r\" (UID: \"fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.595470 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qwq5\" (UniqueName: \"kubernetes.io/projected/1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e-kube-api-access-5qwq5\") pod \"horizon-operator-controller-manager-9f4696d94-d48vc\" (UID: \"1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.598205 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.599411 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.602025 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-k295m" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.609235 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.615117 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4cvx\" (UniqueName: \"kubernetes.io/projected/ca7e36bc-4aa5-414f-92a4-db59399217b9-kube-api-access-j4cvx\") pod \"infra-operator-controller-manager-7d857cc749-fn8fk\" (UID: \"ca7e36bc-4aa5-414f-92a4-db59399217b9\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.616604 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.618823 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.629322 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-44cz7\" (UID: \"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.629355 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tv5g9\" (UniqueName: \"kubernetes.io/projected/dd149347-201c-4ce2-abdd-d41e57d1813a-kube-api-access-tv5g9\") pod \"octavia-operator-controller-manager-76fcc6dc7c-9wbkb\" (UID: \"dd149347-201c-4ce2-abdd-d41e57d1813a\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.629418 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7ckl\" (UniqueName: \"kubernetes.io/projected/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-kube-api-access-d7ckl\") pod \"openstack-baremetal-operator-controller-manager-6d776955-44cz7\" (UID: \"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.629436 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zscls\" (UniqueName: \"kubernetes.io/projected/020f5851-2dbc-464b-9217-6a3cb7a737a7-kube-api-access-zscls\") pod \"neutron-operator-controller-manager-64d7b59854-rrnhb\" (UID: \"020f5851-2dbc-464b-9217-6a3cb7a737a7\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.632587 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gskx2\" (UniqueName: 
\"kubernetes.io/projected/af042430-9b25-44c8-8f30-19db90025d05-kube-api-access-gskx2\") pod \"mariadb-operator-controller-manager-88c7-t4srt\" (UID: \"af042430-9b25-44c8-8f30-19db90025d05\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.632625 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvffj\" (UniqueName: \"kubernetes.io/projected/30ffe357-8b65-4481-95f2-7b2e13fd5676-kube-api-access-qvffj\") pod \"ovn-operator-controller-manager-9976ff44c-zc87x\" (UID: \"30ffe357-8b65-4481-95f2-7b2e13fd5676\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.632663 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sh8q\" (UniqueName: \"kubernetes.io/projected/49160b59-f488-40f9-b23d-a3bccc3c2cb9-kube-api-access-5sh8q\") pod \"manila-operator-controller-manager-6d68dbc695-zzmf2\" (UID: \"49160b59-f488-40f9-b23d-a3bccc3c2cb9\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.632746 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnfqn\" (UniqueName: \"kubernetes.io/projected/12482564-55ba-46c6-857c-de815cddedc7-kube-api-access-xnfqn\") pod \"nova-operator-controller-manager-c7c776c96-4dfhr\" (UID: \"12482564-55ba-46c6-857c-de815cddedc7\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.633364 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-rkkgp" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.650215 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.682019 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.682090 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.683292 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.696258 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-chs9j" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.712390 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.728441 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.735338 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvffj\" (UniqueName: \"kubernetes.io/projected/30ffe357-8b65-4481-95f2-7b2e13fd5676-kube-api-access-qvffj\") pod \"ovn-operator-controller-manager-9976ff44c-zc87x\" (UID: \"30ffe357-8b65-4481-95f2-7b2e13fd5676\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.735376 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sh8q\" (UniqueName: \"kubernetes.io/projected/49160b59-f488-40f9-b23d-a3bccc3c2cb9-kube-api-access-5sh8q\") pod \"manila-operator-controller-manager-6d68dbc695-zzmf2\" (UID: \"49160b59-f488-40f9-b23d-a3bccc3c2cb9\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.735425 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4x4t\" (UniqueName: \"kubernetes.io/projected/18b79acc-6db2-4b4f-8f85-0b65dfd800b3-kube-api-access-c4x4t\") pod \"placement-operator-controller-manager-589c58c6c-f8qhj\" (UID: \"18b79acc-6db2-4b4f-8f85-0b65dfd800b3\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.735452 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnfqn\" (UniqueName: \"kubernetes.io/projected/12482564-55ba-46c6-857c-de815cddedc7-kube-api-access-xnfqn\") pod \"nova-operator-controller-manager-c7c776c96-4dfhr\" (UID: \"12482564-55ba-46c6-857c-de815cddedc7\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.735488 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-44cz7\" (UID: \"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.735505 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tv5g9\" (UniqueName: \"kubernetes.io/projected/dd149347-201c-4ce2-abdd-d41e57d1813a-kube-api-access-tv5g9\") pod \"octavia-operator-controller-manager-76fcc6dc7c-9wbkb\" (UID: \"dd149347-201c-4ce2-abdd-d41e57d1813a\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.735553 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-d7ckl\" (UniqueName: \"kubernetes.io/projected/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-kube-api-access-d7ckl\") pod \"openstack-baremetal-operator-controller-manager-6d776955-44cz7\" (UID: \"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.735571 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zscls\" (UniqueName: \"kubernetes.io/projected/020f5851-2dbc-464b-9217-6a3cb7a737a7-kube-api-access-zscls\") pod \"neutron-operator-controller-manager-64d7b59854-rrnhb\" (UID: \"020f5851-2dbc-464b-9217-6a3cb7a737a7\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.735601 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gskx2\" (UniqueName: \"kubernetes.io/projected/af042430-9b25-44c8-8f30-19db90025d05-kube-api-access-gskx2\") pod \"mariadb-operator-controller-manager-88c7-t4srt\" (UID: \"af042430-9b25-44c8-8f30-19db90025d05\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt" Sep 29 19:12:13 crc kubenswrapper[4792]: E0929 19:12:13.736222 4792 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 19:12:13 crc kubenswrapper[4792]: E0929 19:12:13.736259 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-cert podName:c0dd6d9d-3f07-4723-ae97-7adb0a4863b1 nodeName:}" failed. No retries permitted until 2025-09-29 19:12:14.236245984 +0000 UTC m=+946.229553380 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-44cz7" (UID: "c0dd6d9d-3f07-4723-ae97-7adb0a4863b1") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.788681 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvffj\" (UniqueName: \"kubernetes.io/projected/30ffe357-8b65-4481-95f2-7b2e13fd5676-kube-api-access-qvffj\") pod \"ovn-operator-controller-manager-9976ff44c-zc87x\" (UID: \"30ffe357-8b65-4481-95f2-7b2e13fd5676\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.788924 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.790223 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.801822 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-2cgll" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.802357 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.803355 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.804280 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gskx2\" (UniqueName: \"kubernetes.io/projected/af042430-9b25-44c8-8f30-19db90025d05-kube-api-access-gskx2\") pod \"mariadb-operator-controller-manager-88c7-t4srt\" (UID: \"af042430-9b25-44c8-8f30-19db90025d05\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.804531 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.810914 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7ckl\" (UniqueName: \"kubernetes.io/projected/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-kube-api-access-d7ckl\") pod \"openstack-baremetal-operator-controller-manager-6d776955-44cz7\" (UID: \"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.811009 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnfqn\" (UniqueName: \"kubernetes.io/projected/12482564-55ba-46c6-857c-de815cddedc7-kube-api-access-xnfqn\") pod \"nova-operator-controller-manager-c7c776c96-4dfhr\" (UID: \"12482564-55ba-46c6-857c-de815cddedc7\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.812168 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tv5g9\" (UniqueName: \"kubernetes.io/projected/dd149347-201c-4ce2-abdd-d41e57d1813a-kube-api-access-tv5g9\") pod \"octavia-operator-controller-manager-76fcc6dc7c-9wbkb\" (UID: \"dd149347-201c-4ce2-abdd-d41e57d1813a\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.814995 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sh8q\" (UniqueName: \"kubernetes.io/projected/49160b59-f488-40f9-b23d-a3bccc3c2cb9-kube-api-access-5sh8q\") pod \"manila-operator-controller-manager-6d68dbc695-zzmf2\" (UID: \"49160b59-f488-40f9-b23d-a3bccc3c2cb9\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.817568 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zscls\" (UniqueName: \"kubernetes.io/projected/020f5851-2dbc-464b-9217-6a3cb7a737a7-kube-api-access-zscls\") pod \"neutron-operator-controller-manager-64d7b59854-rrnhb\" (UID: 
\"020f5851-2dbc-464b-9217-6a3cb7a737a7\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.833170 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.835001 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-5zgv4" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.837111 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4x4t\" (UniqueName: \"kubernetes.io/projected/18b79acc-6db2-4b4f-8f85-0b65dfd800b3-kube-api-access-c4x4t\") pod \"placement-operator-controller-manager-589c58c6c-f8qhj\" (UID: \"18b79acc-6db2-4b4f-8f85-0b65dfd800b3\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.837143 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7k6l\" (UniqueName: \"kubernetes.io/projected/5be754f6-b295-4ca1-8f47-5a827e39580a-kube-api-access-p7k6l\") pod \"swift-operator-controller-manager-bc7dc7bd9-4p4rl\" (UID: \"5be754f6-b295-4ca1-8f47-5a827e39580a\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.837194 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cxwj\" (UniqueName: \"kubernetes.io/projected/aa049eb9-e9cf-47c9-a06b-91e8c787e6c1-kube-api-access-9cxwj\") pod \"telemetry-operator-controller-manager-b8d54b5d7-x5h9k\" (UID: \"aa049eb9-e9cf-47c9-a06b-91e8c787e6c1\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.839508 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.851756 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.853077 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-frkgk"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.854173 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.859060 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-hwld5" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.864136 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.882653 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.883620 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.886418 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-frkgk"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.887729 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.891934 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-jx89t" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.920695 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k"] Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.940510 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.941754 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7k6l\" (UniqueName: \"kubernetes.io/projected/5be754f6-b295-4ca1-8f47-5a827e39580a-kube-api-access-p7k6l\") pod \"swift-operator-controller-manager-bc7dc7bd9-4p4rl\" (UID: \"5be754f6-b295-4ca1-8f47-5a827e39580a\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.941794 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxfms\" (UniqueName: \"kubernetes.io/projected/cc2b4990-0306-4b03-b344-b2e186883c4c-kube-api-access-sxfms\") pod \"test-operator-controller-manager-f66b554c6-frkgk\" (UID: \"cc2b4990-0306-4b03-b344-b2e186883c4c\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.941836 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cxwj\" (UniqueName: \"kubernetes.io/projected/aa049eb9-e9cf-47c9-a06b-91e8c787e6c1-kube-api-access-9cxwj\") pod \"telemetry-operator-controller-manager-b8d54b5d7-x5h9k\" (UID: \"aa049eb9-e9cf-47c9-a06b-91e8c787e6c1\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.941884 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tckrh\" (UniqueName: \"kubernetes.io/projected/5ae40942-75a6-41a6-877a-4070bd348d32-kube-api-access-tckrh\") pod \"watcher-operator-controller-manager-76669f99c-m88jp\" (UID: \"5ae40942-75a6-41a6-877a-4070bd348d32\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.942235 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" Sep 29 19:12:13 crc kubenswrapper[4792]: I0929 19:12:13.973344 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.007189 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp"] Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.016656 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4x4t\" (UniqueName: \"kubernetes.io/projected/18b79acc-6db2-4b4f-8f85-0b65dfd800b3-kube-api-access-c4x4t\") pod \"placement-operator-controller-manager-589c58c6c-f8qhj\" (UID: \"18b79acc-6db2-4b4f-8f85-0b65dfd800b3\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.041239 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.046749 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cxwj\" (UniqueName: \"kubernetes.io/projected/aa049eb9-e9cf-47c9-a06b-91e8c787e6c1-kube-api-access-9cxwj\") pod \"telemetry-operator-controller-manager-b8d54b5d7-x5h9k\" (UID: \"aa049eb9-e9cf-47c9-a06b-91e8c787e6c1\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.054068 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxfms\" (UniqueName: \"kubernetes.io/projected/cc2b4990-0306-4b03-b344-b2e186883c4c-kube-api-access-sxfms\") pod \"test-operator-controller-manager-f66b554c6-frkgk\" (UID: \"cc2b4990-0306-4b03-b344-b2e186883c4c\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.054170 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca7e36bc-4aa5-414f-92a4-db59399217b9-cert\") pod \"infra-operator-controller-manager-7d857cc749-fn8fk\" (UID: \"ca7e36bc-4aa5-414f-92a4-db59399217b9\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.054236 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tckrh\" (UniqueName: \"kubernetes.io/projected/5ae40942-75a6-41a6-877a-4070bd348d32-kube-api-access-tckrh\") pod \"watcher-operator-controller-manager-76669f99c-m88jp\" (UID: \"5ae40942-75a6-41a6-877a-4070bd348d32\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" Sep 29 19:12:14 crc kubenswrapper[4792]: E0929 19:12:14.054394 4792 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 19:12:14 crc kubenswrapper[4792]: E0929 19:12:14.054446 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ca7e36bc-4aa5-414f-92a4-db59399217b9-cert podName:ca7e36bc-4aa5-414f-92a4-db59399217b9 nodeName:}" failed. No retries permitted until 2025-09-29 19:12:15.054430404 +0000 UTC m=+947.047737800 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ca7e36bc-4aa5-414f-92a4-db59399217b9-cert") pod "infra-operator-controller-manager-7d857cc749-fn8fk" (UID: "ca7e36bc-4aa5-414f-92a4-db59399217b9") : secret "infra-operator-webhook-server-cert" not found Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.056468 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7k6l\" (UniqueName: \"kubernetes.io/projected/5be754f6-b295-4ca1-8f47-5a827e39580a-kube-api-access-p7k6l\") pod \"swift-operator-controller-manager-bc7dc7bd9-4p4rl\" (UID: \"5be754f6-b295-4ca1-8f47-5a827e39580a\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.092126 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tckrh\" (UniqueName: \"kubernetes.io/projected/5ae40942-75a6-41a6-877a-4070bd348d32-kube-api-access-tckrh\") pod \"watcher-operator-controller-manager-76669f99c-m88jp\" (UID: \"5ae40942-75a6-41a6-877a-4070bd348d32\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.121260 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.135259 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxfms\" (UniqueName: \"kubernetes.io/projected/cc2b4990-0306-4b03-b344-b2e186883c4c-kube-api-access-sxfms\") pod \"test-operator-controller-manager-f66b554c6-frkgk\" (UID: \"cc2b4990-0306-4b03-b344-b2e186883c4c\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.172381 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.183665 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.223069 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.234009 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256"] Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.235157 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.246596 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-bzln5" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.246771 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.249655 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256"] Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.281249 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-44cz7\" (UID: \"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.281280 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2fdp\" (UniqueName: \"kubernetes.io/projected/5e2b0240-3697-4ee1-9052-5e72c8bf386a-kube-api-access-c2fdp\") pod \"openstack-operator-controller-manager-57cc59b9c6-9f256\" (UID: \"5e2b0240-3697-4ee1-9052-5e72c8bf386a\") " pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.281354 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e2b0240-3697-4ee1-9052-5e72c8bf386a-cert\") pod \"openstack-operator-controller-manager-57cc59b9c6-9f256\" (UID: \"5e2b0240-3697-4ee1-9052-5e72c8bf386a\") " pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:14 crc kubenswrapper[4792]: E0929 19:12:14.281462 4792 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 19:12:14 crc kubenswrapper[4792]: E0929 19:12:14.281499 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-cert podName:c0dd6d9d-3f07-4723-ae97-7adb0a4863b1 nodeName:}" failed. No retries permitted until 2025-09-29 19:12:15.281486051 +0000 UTC m=+947.274793447 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-44cz7" (UID: "c0dd6d9d-3f07-4723-ae97-7adb0a4863b1") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.380519 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn"] Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.381941 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.383643 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e2b0240-3697-4ee1-9052-5e72c8bf386a-cert\") pod \"openstack-operator-controller-manager-57cc59b9c6-9f256\" (UID: \"5e2b0240-3697-4ee1-9052-5e72c8bf386a\") " pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.383713 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2fdp\" (UniqueName: \"kubernetes.io/projected/5e2b0240-3697-4ee1-9052-5e72c8bf386a-kube-api-access-c2fdp\") pod \"openstack-operator-controller-manager-57cc59b9c6-9f256\" (UID: \"5e2b0240-3697-4ee1-9052-5e72c8bf386a\") " pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:14 crc kubenswrapper[4792]: E0929 19:12:14.394755 4792 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 29 19:12:14 crc kubenswrapper[4792]: E0929 19:12:14.394814 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e2b0240-3697-4ee1-9052-5e72c8bf386a-cert podName:5e2b0240-3697-4ee1-9052-5e72c8bf386a nodeName:}" failed. No retries permitted until 2025-09-29 19:12:14.894797554 +0000 UTC m=+946.888104950 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5e2b0240-3697-4ee1-9052-5e72c8bf386a-cert") pod "openstack-operator-controller-manager-57cc59b9c6-9f256" (UID: "5e2b0240-3697-4ee1-9052-5e72c8bf386a") : secret "webhook-server-cert" not found Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.395626 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-x5dhw" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.430783 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2fdp\" (UniqueName: \"kubernetes.io/projected/5e2b0240-3697-4ee1-9052-5e72c8bf386a-kube-api-access-c2fdp\") pod \"openstack-operator-controller-manager-57cc59b9c6-9f256\" (UID: \"5e2b0240-3697-4ee1-9052-5e72c8bf386a\") " pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.461771 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn"] Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.489107 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bsmq\" (UniqueName: \"kubernetes.io/projected/5bc872e6-ce23-49cc-8ae7-bf92e4edda47-kube-api-access-9bsmq\") pod \"rabbitmq-cluster-operator-manager-79d8469568-vxkfn\" (UID: \"5bc872e6-ce23-49cc-8ae7-bf92e4edda47\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.590075 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bsmq\" (UniqueName: \"kubernetes.io/projected/5bc872e6-ce23-49cc-8ae7-bf92e4edda47-kube-api-access-9bsmq\") pod \"rabbitmq-cluster-operator-manager-79d8469568-vxkfn\" (UID: \"5bc872e6-ce23-49cc-8ae7-bf92e4edda47\") " 
pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.613832 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bsmq\" (UniqueName: \"kubernetes.io/projected/5bc872e6-ce23-49cc-8ae7-bf92e4edda47-kube-api-access-9bsmq\") pod \"rabbitmq-cluster-operator-manager-79d8469568-vxkfn\" (UID: \"5bc872e6-ce23-49cc-8ae7-bf92e4edda47\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.615492 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm"] Sep 29 19:12:14 crc kubenswrapper[4792]: W0929 19:12:14.624142 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f29d397_4b2d_4668_91f6_744e22070f30.slice/crio-58272892cc448ded1876dcf6175cb9bf3fe8d40d7ad655bcd789d2031675af5d WatchSource:0}: Error finding container 58272892cc448ded1876dcf6175cb9bf3fe8d40d7ad655bcd789d2031675af5d: Status 404 returned error can't find the container with id 58272892cc448ded1876dcf6175cb9bf3fe8d40d7ad655bcd789d2031675af5d Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.781259 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79"] Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.786425 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn" Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.794861 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw"] Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.881113 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm" event={"ID":"7f29d397-4b2d-4668-91f6-744e22070f30","Type":"ContainerStarted","Data":"58272892cc448ded1876dcf6175cb9bf3fe8d40d7ad655bcd789d2031675af5d"} Sep 29 19:12:14 crc kubenswrapper[4792]: I0929 19:12:14.898300 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e2b0240-3697-4ee1-9052-5e72c8bf386a-cert\") pod \"openstack-operator-controller-manager-57cc59b9c6-9f256\" (UID: \"5e2b0240-3697-4ee1-9052-5e72c8bf386a\") " pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:14 crc kubenswrapper[4792]: E0929 19:12:14.898616 4792 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 29 19:12:14 crc kubenswrapper[4792]: E0929 19:12:14.898674 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e2b0240-3697-4ee1-9052-5e72c8bf386a-cert podName:5e2b0240-3697-4ee1-9052-5e72c8bf386a nodeName:}" failed. No retries permitted until 2025-09-29 19:12:15.898657831 +0000 UTC m=+947.891965227 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5e2b0240-3697-4ee1-9052-5e72c8bf386a-cert") pod "openstack-operator-controller-manager-57cc59b9c6-9f256" (UID: "5e2b0240-3697-4ee1-9052-5e72c8bf386a") : secret "webhook-server-cert" not found Sep 29 19:12:14 crc kubenswrapper[4792]: W0929 19:12:14.920483 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa102219_aaa4_46c5_b783_519972688523.slice/crio-6258af462fba57c1821544dbfb5133b54e979c2e1acf466530d8f5cd41ded698 WatchSource:0}: Error finding container 6258af462fba57c1821544dbfb5133b54e979c2e1acf466530d8f5cd41ded698: Status 404 returned error can't find the container with id 6258af462fba57c1821544dbfb5133b54e979c2e1acf466530d8f5cd41ded698 Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.104981 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca7e36bc-4aa5-414f-92a4-db59399217b9-cert\") pod \"infra-operator-controller-manager-7d857cc749-fn8fk\" (UID: \"ca7e36bc-4aa5-414f-92a4-db59399217b9\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.116508 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca7e36bc-4aa5-414f-92a4-db59399217b9-cert\") pod \"infra-operator-controller-manager-7d857cc749-fn8fk\" (UID: \"ca7e36bc-4aa5-414f-92a4-db59399217b9\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.213171 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.239606 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr"] Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.307513 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-44cz7\" (UID: \"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.310986 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c0dd6d9d-3f07-4723-ae97-7adb0a4863b1-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-44cz7\" (UID: \"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.462908 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.642348 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp"] Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.669900 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98"] Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.688511 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65"] Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.700936 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb"] Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.711615 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r"] Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.747926 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb"] Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.777184 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-t4srt"] Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.794876 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x"] Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.806639 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc"] Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.808719 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k"] Sep 29 19:12:15 crc kubenswrapper[4792]: W0929 19:12:15.811874 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf042430_9b25_44c8_8f30_19db90025d05.slice/crio-c5d4c25e9034d956a58b4eb0d07f88a589831728bad41691e3e46192280e85ca WatchSource:0}: Error finding container c5d4c25e9034d956a58b4eb0d07f88a589831728bad41691e3e46192280e85ca: Status 404 returned error can't find the container with id c5d4c25e9034d956a58b4eb0d07f88a589831728bad41691e3e46192280e85ca Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.912075 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" event={"ID":"dd149347-201c-4ce2-abdd-d41e57d1813a","Type":"ContainerStarted","Data":"3e52288408f7db50e0c5f5d9338039b8ac0db4914839888707447422d6c2fc43"} Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.916409 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e2b0240-3697-4ee1-9052-5e72c8bf386a-cert\") pod \"openstack-operator-controller-manager-57cc59b9c6-9f256\" (UID: \"5e2b0240-3697-4ee1-9052-5e72c8bf386a\") " pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.917789 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" event={"ID":"bd8fdc17-d2f2-4644-8789-c8188f91ce61","Type":"ContainerStarted","Data":"2499355c3d353bdecee086c2cc5259c56bb665a2fc16c4a5810ad822ab582cb5"} Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.924543 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e2b0240-3697-4ee1-9052-5e72c8bf386a-cert\") pod \"openstack-operator-controller-manager-57cc59b9c6-9f256\" (UID: \"5e2b0240-3697-4ee1-9052-5e72c8bf386a\") " pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.925808 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc" event={"ID":"1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e","Type":"ContainerStarted","Data":"d0ff521cc24ea6429f188987cb1951082907d2c72cc79b8bc2d3c2db7601e251"} Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.931439 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" event={"ID":"fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0","Type":"ContainerStarted","Data":"0955577eaeca1b2d3d9d92317351799177ec941b01f0b80f568860ab4cad650e"} Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.934231 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb" event={"ID":"020f5851-2dbc-464b-9217-6a3cb7a737a7","Type":"ContainerStarted","Data":"c016bde808f1d6381ffe8b06eb77a7131f690e0909b9fc19aa0756d3a5663fea"} Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.955185 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x" event={"ID":"30ffe357-8b65-4481-95f2-7b2e13fd5676","Type":"ContainerStarted","Data":"3422c77b123347c0784384b99bd5bebc16954528c31fd7d02f7646ac2b0cab92"} Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.960565 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.960645 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj"] Sep 29 19:12:15 crc kubenswrapper[4792]: W0929 19:12:15.967101 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18b79acc_6db2_4b4f_8f85_0b65dfd800b3.slice/crio-7e76c0559193a3fbcf5f8ab8a78a664841c6578961bcfdf841339cd19c24e3b7 WatchSource:0}: Error finding container 7e76c0559193a3fbcf5f8ab8a78a664841c6578961bcfdf841339cd19c24e3b7: Status 404 returned error can't find the container with id 7e76c0559193a3fbcf5f8ab8a78a664841c6578961bcfdf841339cd19c24e3b7 Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.967880 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k" event={"ID":"aa049eb9-e9cf-47c9-a06b-91e8c787e6c1","Type":"ContainerStarted","Data":"00869db9066958021b44e692bf0602db9d507f4657b3a7f04ae4d0e8a3786a99"} Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.969766 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" event={"ID":"5ae40942-75a6-41a6-877a-4070bd348d32","Type":"ContainerStarted","Data":"7851472b465cb6c3f4c2ee9d1c48019b4ea87e91d641f880bef219abb545e5cf"} Sep 29 19:12:15 crc kubenswrapper[4792]: I0929 19:12:15.976008 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" event={"ID":"b9148442-b4dc-4926-920d-33c9a00172fa","Type":"ContainerStarted","Data":"6af34ba677dcf9098915829f7345f0f05650bed8c754726e718a0e4878d037ff"} Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.009376 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2"] Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.011390 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw" event={"ID":"aa102219-aaa4-46c5-b783-519972688523","Type":"ContainerStarted","Data":"6258af462fba57c1821544dbfb5133b54e979c2e1acf466530d8f5cd41ded698"} Sep 29 19:12:16 crc kubenswrapper[4792]: E0929 19:12:16.011760 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-c4x4t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-589c58c6c-f8qhj_openstack-operators(18b79acc-6db2-4b4f-8f85-0b65dfd800b3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.015937 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt" event={"ID":"af042430-9b25-44c8-8f30-19db90025d05","Type":"ContainerStarted","Data":"c5d4c25e9034d956a58b4eb0d07f88a589831728bad41691e3e46192280e85ca"} Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.035562 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr"] Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.035585 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79" event={"ID":"1c191b6e-d1aa-4576-98da-db7178aed835","Type":"ContainerStarted","Data":"052a3e52e109a9549c9b90dcd61e0c506d58063424f4de191507021370fa818d"} Sep 29 19:12:16 crc kubenswrapper[4792]: E0929 19:12:16.059534 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xnfqn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-c7c776c96-4dfhr_openstack-operators(12482564-55ba-46c6-857c-de815cddedc7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.061343 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65" event={"ID":"ee957b59-f5b6-4306-b6a7-4550199fe910","Type":"ContainerStarted","Data":"fe1d05b3ffb411cb62f99a166dd3cc7ef53e7db805834449d00f91f415eb7619"} Sep 29 19:12:16 crc kubenswrapper[4792]: W0929 19:12:16.068471 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5be754f6_b295_4ca1_8f47_5a827e39580a.slice/crio-959f97e8b6a63a75038f18954a040d49c871e261d3bacb896f5ef1bd96a7706c WatchSource:0}: Error finding container 959f97e8b6a63a75038f18954a040d49c871e261d3bacb896f5ef1bd96a7706c: Status 404 returned error can't find the container with id 959f97e8b6a63a75038f18954a040d49c871e261d3bacb896f5ef1bd96a7706c Sep 29 19:12:16 crc kubenswrapper[4792]: E0929 19:12:16.087158 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p7k6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bc7dc7bd9-4p4rl_openstack-operators(5be754f6-b295-4ca1-8f47-5a827e39580a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.089059 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl"] Sep 29 19:12:16 crc kubenswrapper[4792]: W0929 19:12:16.093159 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5bc872e6_ce23_49cc_8ae7_bf92e4edda47.slice/crio-68d2575380efb0cf96022783c37f0d8ae1615ae3c56da60395ac9725ec27e288 WatchSource:0}: Error finding container 68d2575380efb0cf96022783c37f0d8ae1615ae3c56da60395ac9725ec27e288: Status 404 returned error can't find the container with id 68d2575380efb0cf96022783c37f0d8ae1615ae3c56da60395ac9725ec27e288 Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.108342 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk"] Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.115917 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn"] Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.119862 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-frkgk"] Sep 29 19:12:16 crc kubenswrapper[4792]: E0929 19:12:16.129638 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sxfms,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-f66b554c6-frkgk_openstack-operators(cc2b4990-0306-4b03-b344-b2e186883c4c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:12:16 crc kubenswrapper[4792]: E0929 19:12:16.129669 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:de99ad053f95f132f62b38335b2e8bf22fc28acbd441c3814764d63b63ef755f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j4cvx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-7d857cc749-fn8fk_openstack-operators(ca7e36bc-4aa5-414f-92a4-db59399217b9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.293129 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7"] Sep 29 19:12:16 crc kubenswrapper[4792]: E0929 19:12:16.653601 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" podUID="18b79acc-6db2-4b4f-8f85-0b65dfd800b3" Sep 29 19:12:16 crc kubenswrapper[4792]: E0929 19:12:16.696659 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" podUID="ca7e36bc-4aa5-414f-92a4-db59399217b9" Sep 29 19:12:16 crc kubenswrapper[4792]: E0929 19:12:16.717162 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" podUID="5be754f6-b295-4ca1-8f47-5a827e39580a" Sep 29 19:12:16 crc kubenswrapper[4792]: E0929 19:12:16.722032 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" podUID="12482564-55ba-46c6-857c-de815cddedc7" Sep 29 19:12:16 crc kubenswrapper[4792]: E0929 19:12:16.736789 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" podUID="cc2b4990-0306-4b03-b344-b2e186883c4c" Sep 29 19:12:16 crc kubenswrapper[4792]: I0929 19:12:16.930747 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256"] Sep 29 19:12:16 crc kubenswrapper[4792]: W0929 19:12:16.952666 4792 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e2b0240_3697_4ee1_9052_5e72c8bf386a.slice/crio-a915c217af8a55ae092a7147b76f46fb5fb89ffb679d13b31a04efcc0d76074c WatchSource:0}: Error finding container a915c217af8a55ae092a7147b76f46fb5fb89ffb679d13b31a04efcc0d76074c: Status 404 returned error can't find the container with id a915c217af8a55ae092a7147b76f46fb5fb89ffb679d13b31a04efcc0d76074c Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.129154 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" event={"ID":"5e2b0240-3697-4ee1-9052-5e72c8bf386a","Type":"ContainerStarted","Data":"a915c217af8a55ae092a7147b76f46fb5fb89ffb679d13b31a04efcc0d76074c"} Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.171267 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" event={"ID":"ca7e36bc-4aa5-414f-92a4-db59399217b9","Type":"ContainerStarted","Data":"1beae7596c09f4f7633e3eed3d22dd9ab3503c01a6ff02a394f0876471fcbbbe"} Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.171313 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" event={"ID":"ca7e36bc-4aa5-414f-92a4-db59399217b9","Type":"ContainerStarted","Data":"616d9c77ccd7573ddf4fc8b4a4872695277ec0530ed02cbfcf95b2fa1362a021"} Sep 29 19:12:17 crc kubenswrapper[4792]: E0929 19:12:17.175122 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:de99ad053f95f132f62b38335b2e8bf22fc28acbd441c3814764d63b63ef755f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" podUID="ca7e36bc-4aa5-414f-92a4-db59399217b9" Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.175238 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" event={"ID":"49160b59-f488-40f9-b23d-a3bccc3c2cb9","Type":"ContainerStarted","Data":"ab84fd589e9f8054c14049964b7f1af5c3a301baf63a8c9d706d13679ff72cac"} Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.181692 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn" event={"ID":"5bc872e6-ce23-49cc-8ae7-bf92e4edda47","Type":"ContainerStarted","Data":"68d2575380efb0cf96022783c37f0d8ae1615ae3c56da60395ac9725ec27e288"} Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.184279 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" event={"ID":"cc2b4990-0306-4b03-b344-b2e186883c4c","Type":"ContainerStarted","Data":"554bb77999cefd0ef793e5ac2476e0452ac26739f72e4e5815a69679a3a89fb2"} Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.184312 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" event={"ID":"cc2b4990-0306-4b03-b344-b2e186883c4c","Type":"ContainerStarted","Data":"6c44056a763cda0cb2f8387bbae1e90f466f68e7caf82c007637848a000b0656"} Sep 29 19:12:17 crc kubenswrapper[4792]: E0929 19:12:17.185778 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" podUID="cc2b4990-0306-4b03-b344-b2e186883c4c" Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.212401 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" event={"ID":"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1","Type":"ContainerStarted","Data":"0b3c08a2927f96845deeca5c517a0e4f1067d322f65223cfc776879c070b54e6"} Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.220765 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" event={"ID":"5be754f6-b295-4ca1-8f47-5a827e39580a","Type":"ContainerStarted","Data":"436a5aed9e9b2a362045c6377e0a572fe070e6868bf5a0fd3d4cbbf551c3ccfb"} Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.220798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" event={"ID":"5be754f6-b295-4ca1-8f47-5a827e39580a","Type":"ContainerStarted","Data":"959f97e8b6a63a75038f18954a040d49c871e261d3bacb896f5ef1bd96a7706c"} Sep 29 19:12:17 crc kubenswrapper[4792]: E0929 19:12:17.225046 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" podUID="5be754f6-b295-4ca1-8f47-5a827e39580a" Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.228409 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" event={"ID":"18b79acc-6db2-4b4f-8f85-0b65dfd800b3","Type":"ContainerStarted","Data":"a7e47cbaddde38a8be543b05418a4093e9f6562ac906bd9d58d82bd9d06765e2"} Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.228466 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" event={"ID":"18b79acc-6db2-4b4f-8f85-0b65dfd800b3","Type":"ContainerStarted","Data":"7e76c0559193a3fbcf5f8ab8a78a664841c6578961bcfdf841339cd19c24e3b7"} Sep 29 19:12:17 crc kubenswrapper[4792]: E0929 19:12:17.234443 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" podUID="18b79acc-6db2-4b4f-8f85-0b65dfd800b3" Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.254113 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" event={"ID":"12482564-55ba-46c6-857c-de815cddedc7","Type":"ContainerStarted","Data":"ff6697ec54af34bd3a1b46af475a696c652a84451d039913eb85d68124eae355"} Sep 29 19:12:17 crc kubenswrapper[4792]: I0929 19:12:17.254151 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" 
event={"ID":"12482564-55ba-46c6-857c-de815cddedc7","Type":"ContainerStarted","Data":"e555d3d4a7fe2c2c84e7d49cb6c025d8afef68994b5d76a50ec6f86e30db3d4f"} Sep 29 19:12:17 crc kubenswrapper[4792]: E0929 19:12:17.272136 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" podUID="12482564-55ba-46c6-857c-de815cddedc7" Sep 29 19:12:18 crc kubenswrapper[4792]: I0929 19:12:18.288619 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" event={"ID":"5e2b0240-3697-4ee1-9052-5e72c8bf386a","Type":"ContainerStarted","Data":"48ab5e12df280548c8d15a2fdcc857e438aeaa0ad86121cad5fcdd5f4e23309b"} Sep 29 19:12:18 crc kubenswrapper[4792]: I0929 19:12:18.288949 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" event={"ID":"5e2b0240-3697-4ee1-9052-5e72c8bf386a","Type":"ContainerStarted","Data":"ac9d0aecf1c63cf6717ed45875a3699146eb6d4788b1e006bd6c0709c19b2d50"} Sep 29 19:12:18 crc kubenswrapper[4792]: I0929 19:12:18.288970 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:18 crc kubenswrapper[4792]: E0929 19:12:18.296694 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" podUID="cc2b4990-0306-4b03-b344-b2e186883c4c" Sep 29 19:12:18 crc kubenswrapper[4792]: E0929 19:12:18.296794 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" podUID="5be754f6-b295-4ca1-8f47-5a827e39580a" Sep 29 19:12:18 crc kubenswrapper[4792]: E0929 19:12:18.296882 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" podUID="18b79acc-6db2-4b4f-8f85-0b65dfd800b3" Sep 29 19:12:18 crc kubenswrapper[4792]: E0929 19:12:18.296935 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:de99ad053f95f132f62b38335b2e8bf22fc28acbd441c3814764d63b63ef755f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" podUID="ca7e36bc-4aa5-414f-92a4-db59399217b9" Sep 29 19:12:18 crc kubenswrapper[4792]: E0929 19:12:18.296982 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" podUID="12482564-55ba-46c6-857c-de815cddedc7" Sep 29 19:12:19 crc kubenswrapper[4792]: I0929 19:12:19.088715 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" podStartSLOduration=5.088692786 podStartE2EDuration="5.088692786s" podCreationTimestamp="2025-09-29 19:12:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:12:18.497152938 +0000 UTC m=+950.490460344" watchObservedRunningTime="2025-09-29 19:12:19.088692786 +0000 UTC m=+951.082000182" Sep 29 19:12:25 crc kubenswrapper[4792]: I0929 19:12:25.966918 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-57cc59b9c6-9f256" Sep 29 19:12:32 crc kubenswrapper[4792]: E0929 19:12:32.377843 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8" Sep 29 19:12:32 crc kubenswrapper[4792]: E0929 19:12:32.378594 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tv5g9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-76fcc6dc7c-9wbkb_openstack-operators(dd149347-201c-4ce2-abdd-d41e57d1813a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:12:33 crc kubenswrapper[4792]: E0929 19:12:33.740044 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:e6f1ed6b386f77415c2a44e770d98ab6d16b6f6b494c4d1b4ac4b46368c4a4e6" Sep 29 19:12:33 crc kubenswrapper[4792]: E0929 19:12:33.740366 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:e6f1ed6b386f77415c2a44e770d98ab6d16b6f6b494c4d1b4ac4b46368c4a4e6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gprll,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-7975b88857-nfk4r_openstack-operators(fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:12:35 crc kubenswrapper[4792]: E0929 19:12:35.398656 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302" Sep 29 19:12:35 crc kubenswrapper[4792]: E0929 19:12:35.399044 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qvffj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-9976ff44c-zc87x_openstack-operators(30ffe357-8b65-4481-95f2-7b2e13fd5676): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:12:35 crc kubenswrapper[4792]: E0929 19:12:35.837096 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:21792a2317c0a55e40b2a02a7d5d4682b76538ed2a2e0633199aa395e60ecc72" Sep 29 19:12:35 crc kubenswrapper[4792]: E0929 19:12:35.837300 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:21792a2317c0a55e40b2a02a7d5d4682b76538ed2a2e0633199aa395e60ecc72,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g4p7v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-84958c4d49-j2crr_openstack-operators(bd8fdc17-d2f2-4644-8789-c8188f91ce61): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:12:37 crc kubenswrapper[4792]: E0929 19:12:37.238583 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:bb39758cc8cd0d2cd02841dc81b53fd88647e2db15ee16cdd8c44d4098a942fd" Sep 29 19:12:37 crc kubenswrapper[4792]: E0929 19:12:37.239242 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:bb39758cc8cd0d2cd02841dc81b53fd88647e2db15ee16cdd8c44d4098a942fd,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xv9b5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-6ff8b75857-c6z65_openstack-operators(ee957b59-f5b6-4306-b6a7-4550199fe910): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:12:37 crc kubenswrapper[4792]: E0929 19:12:37.660958 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f" Sep 29 19:12:37 crc kubenswrapper[4792]: E0929 19:12:37.661173 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9cxwj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-b8d54b5d7-x5h9k_openstack-operators(aa049eb9-e9cf-47c9-a06b-91e8c787e6c1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:12:39 crc kubenswrapper[4792]: E0929 19:12:39.474371 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a" Sep 29 19:12:39 crc kubenswrapper[4792]: E0929 19:12:39.474561 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-n5hzm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-5bd55b4bff-swj98_openstack-operators(b9148442-b4dc-4926-920d-33c9a00172fa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:12:39 crc kubenswrapper[4792]: E0929 19:12:39.883118 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b" Sep 29 19:12:39 crc kubenswrapper[4792]: E0929 19:12:39.883539 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tckrh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-76669f99c-m88jp_openstack-operators(5ae40942-75a6-41a6-877a-4070bd348d32): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:12:40 crc kubenswrapper[4792]: E0929 19:12:40.261632 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884" Sep 29 19:12:40 crc kubenswrapper[4792]: E0929 19:12:40.261967 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5sh8q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-6d68dbc695-zzmf2_openstack-operators(49160b59-f488-40f9-b23d-a3bccc3c2cb9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:12:45 crc kubenswrapper[4792]: E0929 19:12:45.976803 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b" Sep 29 19:12:45 crc kubenswrapper[4792]: E0929 19:12:45.977302 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9bsmq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-vxkfn_openstack-operators(5bc872e6-ce23-49cc-8ae7-bf92e4edda47): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:12:45 crc 
kubenswrapper[4792]: E0929 19:12:45.978483 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn" podUID="5bc872e6-ce23-49cc-8ae7-bf92e4edda47" Sep 29 19:12:46 crc kubenswrapper[4792]: E0929 19:12:46.450310 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" podUID="fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0" Sep 29 19:12:46 crc kubenswrapper[4792]: E0929 19:12:46.452843 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" podUID="bd8fdc17-d2f2-4644-8789-c8188f91ce61" Sep 29 19:12:46 crc kubenswrapper[4792]: E0929 19:12:46.464349 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" podUID="dd149347-201c-4ce2-abdd-d41e57d1813a" Sep 29 19:12:46 crc kubenswrapper[4792]: I0929 19:12:46.493755 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" event={"ID":"fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0","Type":"ContainerStarted","Data":"7b4fce2c5d6a7dcd4461800b9f2b518193ed0b86fa2e74464ab6048414ca1108"} Sep 29 19:12:46 crc kubenswrapper[4792]: I0929 19:12:46.496136 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm" event={"ID":"7f29d397-4b2d-4668-91f6-744e22070f30","Type":"ContainerStarted","Data":"2ae3094c56702d0de751e6a6b89aa9d9168c4256cfcd71ee74c8a8b515e14b98"} Sep 29 19:12:46 crc kubenswrapper[4792]: I0929 19:12:46.500495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" event={"ID":"dd149347-201c-4ce2-abdd-d41e57d1813a","Type":"ContainerStarted","Data":"1e4cbdc230d2b6740a9d5e31ee482cd20b051fbc5db82fb060cf37e11cf85cb6"} Sep 29 19:12:46 crc kubenswrapper[4792]: I0929 19:12:46.503832 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" event={"ID":"bd8fdc17-d2f2-4644-8789-c8188f91ce61","Type":"ContainerStarted","Data":"ceedfcce3285669942b74e3eefbb7bd02b3e522d2edd5fbefd1c484867ea51fe"} Sep 29 19:12:46 crc kubenswrapper[4792]: E0929 19:12:46.504481 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn" podUID="5bc872e6-ce23-49cc-8ae7-bf92e4edda47" Sep 29 19:12:46 crc kubenswrapper[4792]: E0929 19:12:46.532786 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x" podUID="30ffe357-8b65-4481-95f2-7b2e13fd5676" Sep 29 19:12:46 crc kubenswrapper[4792]: E0929 19:12:46.540118 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k" podUID="aa049eb9-e9cf-47c9-a06b-91e8c787e6c1" Sep 29 19:12:46 crc kubenswrapper[4792]: E0929 19:12:46.551168 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" podUID="49160b59-f488-40f9-b23d-a3bccc3c2cb9" Sep 29 19:12:46 crc kubenswrapper[4792]: E0929 19:12:46.639243 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65" podUID="ee957b59-f5b6-4306-b6a7-4550199fe910" Sep 29 19:12:46 crc kubenswrapper[4792]: E0929 19:12:46.718871 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" podUID="5ae40942-75a6-41a6-877a-4070bd348d32" Sep 29 19:12:47 crc kubenswrapper[4792]: E0929 19:12:47.002188 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" podUID="b9148442-b4dc-4926-920d-33c9a00172fa" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.513149 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" event={"ID":"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1","Type":"ContainerStarted","Data":"efa3dae1bb81473ce700a7a75f9d437dbf8c3625cda0759612e81f9807754b5e"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.514756 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" event={"ID":"49160b59-f488-40f9-b23d-a3bccc3c2cb9","Type":"ContainerStarted","Data":"df7674555017cc0d186cfe02754408b539bff8bb53b545b11cb00136f365cdfc"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.516634 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc" event={"ID":"1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e","Type":"ContainerStarted","Data":"215c363348cfd612e3e635df099edca5e9613c57d31e01b7386d5c20cbde52fd"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.516685 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc" event={"ID":"1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e","Type":"ContainerStarted","Data":"79ebad7002b1143f7e9ff6731a9b91fce3f67f2c69cc864f485447635bc9a527"} 
Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.517685 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.519208 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb" event={"ID":"020f5851-2dbc-464b-9217-6a3cb7a737a7","Type":"ContainerStarted","Data":"3ea53888c6be798fba28cc0bc33477ec1d5c48e350bfaad3a36ec4f6f5e00200"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.521213 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x" event={"ID":"30ffe357-8b65-4481-95f2-7b2e13fd5676","Type":"ContainerStarted","Data":"ce109f49deac921f775808e11f86ca38dca38325b8b87b1a9fb24b0b7f14ec08"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.525891 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65" event={"ID":"ee957b59-f5b6-4306-b6a7-4550199fe910","Type":"ContainerStarted","Data":"0a2452da4d55eb5ecb9a41f360ab23c96bbbf5447653a86b3c15b53bb72e63ec"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.527619 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" event={"ID":"5ae40942-75a6-41a6-877a-4070bd348d32","Type":"ContainerStarted","Data":"89ea3858e06824c5cfa937f5e19952e92f5e37d86c74fbd7ac8f8c6800b49428"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.530668 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" event={"ID":"cc2b4990-0306-4b03-b344-b2e186883c4c","Type":"ContainerStarted","Data":"f8faea814809cd97935e63d7638e258bcce5c07c8660168ea6e3a82400bd2f19"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.530816 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.532447 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" event={"ID":"12482564-55ba-46c6-857c-de815cddedc7","Type":"ContainerStarted","Data":"34d515734662dea069ae02583b2d43098589eaf11d770b4dd03a79c002d542bf"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.532643 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.533770 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" event={"ID":"ca7e36bc-4aa5-414f-92a4-db59399217b9","Type":"ContainerStarted","Data":"3adba232bc3ddee46fac774bf911b169da90d25f7b07dfe8435cb70f51a5b3ca"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.534254 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.535539 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" 
event={"ID":"5be754f6-b295-4ca1-8f47-5a827e39580a","Type":"ContainerStarted","Data":"a93e07d92f83642634a867628bf30bc513960e63260d8f288df690bbc073b51d"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.536021 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.537351 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k" event={"ID":"aa049eb9-e9cf-47c9-a06b-91e8c787e6c1","Type":"ContainerStarted","Data":"0e2c952386d2751e23a1660fe63de7d1f8b4e55768bb887b552e4f66a0f29049"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.539059 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm" event={"ID":"7f29d397-4b2d-4668-91f6-744e22070f30","Type":"ContainerStarted","Data":"5d7e85749bcbf9bbc7aa75c47a93b687e9a4fcbc534d247a869c913d914e40c3"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.539188 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.540892 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt" event={"ID":"af042430-9b25-44c8-8f30-19db90025d05","Type":"ContainerStarted","Data":"84e1b5e2e6f0731163c63d26a27c9612e15dbfab807db80b80dc344f1bccffed"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.542505 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79" event={"ID":"1c191b6e-d1aa-4576-98da-db7178aed835","Type":"ContainerStarted","Data":"2ac697723696b845c802618f9ffc376c50a2d1c69228974d9d7218b23a50718a"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.547834 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" event={"ID":"18b79acc-6db2-4b4f-8f85-0b65dfd800b3","Type":"ContainerStarted","Data":"0077046a011292de3d8e9348e57076628daa9ab2042062f213c846596d0efa7f"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.548319 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.549341 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" event={"ID":"b9148442-b4dc-4926-920d-33c9a00172fa","Type":"ContainerStarted","Data":"4d640fe930f17748c3c7e66e13b38002c9faeac632efddfe68d98c4d3bcf6a9e"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.551050 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw" event={"ID":"aa102219-aaa4-46c5-b783-519972688523","Type":"ContainerStarted","Data":"1409a1606b081367d7996a0ae569d59dd833177f248ff6b37d2e1afab400f06b"} Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.597219 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc" podStartSLOduration=5.231263901 podStartE2EDuration="34.597200604s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" 
firstStartedPulling="2025-09-29 19:12:15.828424092 +0000 UTC m=+947.821731488" lastFinishedPulling="2025-09-29 19:12:45.194360785 +0000 UTC m=+977.187668191" observedRunningTime="2025-09-29 19:12:47.593490861 +0000 UTC m=+979.586798257" watchObservedRunningTime="2025-09-29 19:12:47.597200604 +0000 UTC m=+979.590508000" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.627959 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk" podStartSLOduration=4.672148538 podStartE2EDuration="34.627941273s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:16.129480426 +0000 UTC m=+948.122787822" lastFinishedPulling="2025-09-29 19:12:46.085273161 +0000 UTC m=+978.078580557" observedRunningTime="2025-09-29 19:12:47.624732471 +0000 UTC m=+979.618039877" watchObservedRunningTime="2025-09-29 19:12:47.627941273 +0000 UTC m=+979.621248659" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.689696 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr" podStartSLOduration=4.610333489 podStartE2EDuration="34.689677087s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:16.059378382 +0000 UTC m=+948.052685778" lastFinishedPulling="2025-09-29 19:12:46.13872199 +0000 UTC m=+978.132029376" observedRunningTime="2025-09-29 19:12:47.687778225 +0000 UTC m=+979.681085631" watchObservedRunningTime="2025-09-29 19:12:47.689677087 +0000 UTC m=+979.682984483" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.751376 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm" podStartSLOduration=4.195699859 podStartE2EDuration="34.75135865s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:14.637445695 +0000 UTC m=+946.630753091" lastFinishedPulling="2025-09-29 19:12:45.193104466 +0000 UTC m=+977.186411882" observedRunningTime="2025-09-29 19:12:47.747125045 +0000 UTC m=+979.740432441" watchObservedRunningTime="2025-09-29 19:12:47.75135865 +0000 UTC m=+979.744666046" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.763145 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl" podStartSLOduration=4.71673372 podStartE2EDuration="34.763125644s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:16.087015677 +0000 UTC m=+948.080323073" lastFinishedPulling="2025-09-29 19:12:46.133407601 +0000 UTC m=+978.126714997" observedRunningTime="2025-09-29 19:12:47.762110042 +0000 UTC m=+979.755417458" watchObservedRunningTime="2025-09-29 19:12:47.763125644 +0000 UTC m=+979.756433040" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.881435 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk" podStartSLOduration=4.877458382 podStartE2EDuration="34.881419247s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:16.129430385 +0000 UTC m=+948.122737771" lastFinishedPulling="2025-09-29 19:12:46.13339124 +0000 UTC m=+978.126698636" observedRunningTime="2025-09-29 19:12:47.848669142 +0000 UTC m=+979.841976548" watchObservedRunningTime="2025-09-29 
19:12:47.881419247 +0000 UTC m=+979.874726643" Sep 29 19:12:47 crc kubenswrapper[4792]: I0929 19:12:47.911524 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj" podStartSLOduration=4.875941646 podStartE2EDuration="34.911508911s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:16.011612452 +0000 UTC m=+948.004919848" lastFinishedPulling="2025-09-29 19:12:46.047179717 +0000 UTC m=+978.040487113" observedRunningTime="2025-09-29 19:12:47.906652753 +0000 UTC m=+979.899960149" watchObservedRunningTime="2025-09-29 19:12:47.911508911 +0000 UTC m=+979.904816307" Sep 29 19:12:48 crc kubenswrapper[4792]: E0929 19:12:48.204904 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" podUID="b9148442-b4dc-4926-920d-33c9a00172fa" Sep 29 19:12:48 crc kubenswrapper[4792]: E0929 19:12:48.210609 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" podUID="5ae40942-75a6-41a6-877a-4070bd348d32" Sep 29 19:12:48 crc kubenswrapper[4792]: E0929 19:12:48.210685 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884\\\"\"" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" podUID="49160b59-f488-40f9-b23d-a3bccc3c2cb9" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.559013 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" event={"ID":"dd149347-201c-4ce2-abdd-d41e57d1813a","Type":"ContainerStarted","Data":"10947ed171941c80f484fa8e41fa5b099f36d7b3bcea89d9f0e0fceada0281b9"} Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.559400 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.563232 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" event={"ID":"bd8fdc17-d2f2-4644-8789-c8188f91ce61","Type":"ContainerStarted","Data":"462eef1059f16b8c54546ac778cce9d3a8f9ba9c345e732d76355df09013cbc2"} Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.563840 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.568716 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt" 
event={"ID":"af042430-9b25-44c8-8f30-19db90025d05","Type":"ContainerStarted","Data":"ce5e1881797334483997b70cd6ed51df93f6b82a14e846566e3184c786a8cce3"} Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.569251 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.570783 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79" event={"ID":"1c191b6e-d1aa-4576-98da-db7178aed835","Type":"ContainerStarted","Data":"93d0682165cf1088fc0b3ed493377c6222bb459e6a84ef967429ff7f56e2d946"} Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.571205 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.572526 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw" event={"ID":"aa102219-aaa4-46c5-b783-519972688523","Type":"ContainerStarted","Data":"036c10ffe5913ba1f283c4c7507a3d4a2ec485afb4efe6c81ae34ed21c2eae3c"} Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.572597 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.574149 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" event={"ID":"fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0","Type":"ContainerStarted","Data":"48ca4817849d48fadce48ac6227c59c87fbb9fc31366a26a7c4077f88dac5bd5"} Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.574503 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.576932 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb" event={"ID":"020f5851-2dbc-464b-9217-6a3cb7a737a7","Type":"ContainerStarted","Data":"c0c8ff84538e82fbe9ac158df6c1cb9f8865cbf3ffe7e2acbcbb9d5397c12b52"} Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.577322 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.577739 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb" podStartSLOduration=3.13669232 podStartE2EDuration="35.577731579s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:15.770343197 +0000 UTC m=+947.763650593" lastFinishedPulling="2025-09-29 19:12:48.211382456 +0000 UTC m=+980.204689852" observedRunningTime="2025-09-29 19:12:48.575174662 +0000 UTC m=+980.568482058" watchObservedRunningTime="2025-09-29 19:12:48.577731579 +0000 UTC m=+980.571038975" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.585757 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" 
event={"ID":"c0dd6d9d-3f07-4723-ae97-7adb0a4863b1","Type":"ContainerStarted","Data":"b94922f8ca1d37be08c7dfeba9a7bd2ab0ff92eaa431ed4e805fb7a79be8c6a0"} Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.585869 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" Sep 29 19:12:48 crc kubenswrapper[4792]: E0929 19:12:48.587689 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" podUID="b9148442-b4dc-4926-920d-33c9a00172fa" Sep 29 19:12:48 crc kubenswrapper[4792]: E0929 19:12:48.587732 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884\\\"\"" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" podUID="49160b59-f488-40f9-b23d-a3bccc3c2cb9" Sep 29 19:12:48 crc kubenswrapper[4792]: E0929 19:12:48.587876 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" podUID="5ae40942-75a6-41a6-877a-4070bd348d32" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.621036 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr" podStartSLOduration=3.882427115 podStartE2EDuration="35.620087739s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:15.244977839 +0000 UTC m=+947.238285235" lastFinishedPulling="2025-09-29 19:12:46.982638463 +0000 UTC m=+978.975945859" observedRunningTime="2025-09-29 19:12:48.599114909 +0000 UTC m=+980.592422305" watchObservedRunningTime="2025-09-29 19:12:48.620087739 +0000 UTC m=+980.613395135" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.621605 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw" podStartSLOduration=5.350106733 podStartE2EDuration="35.621599093s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:14.922977497 +0000 UTC m=+946.916284893" lastFinishedPulling="2025-09-29 19:12:45.194469837 +0000 UTC m=+977.187777253" observedRunningTime="2025-09-29 19:12:48.619594998 +0000 UTC m=+980.612902414" watchObservedRunningTime="2025-09-29 19:12:48.621599093 +0000 UTC m=+980.614906479" Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.641451 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r" podStartSLOduration=3.228446023 podStartE2EDuration="35.641435748s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:15.797762116 +0000 UTC m=+947.791069512" lastFinishedPulling="2025-09-29 
19:12:48.210751841 +0000 UTC m=+980.204059237" observedRunningTime="2025-09-29 19:12:48.636355864 +0000 UTC m=+980.629663270" watchObservedRunningTime="2025-09-29 19:12:48.641435748 +0000 UTC m=+980.634743144"
Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.661118 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt" podStartSLOduration=6.289989957 podStartE2EDuration="35.661102729s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:15.823001307 +0000 UTC m=+947.816308703" lastFinishedPulling="2025-09-29 19:12:45.194114069 +0000 UTC m=+977.187421475" observedRunningTime="2025-09-29 19:12:48.657192621 +0000 UTC m=+980.650500037" watchObservedRunningTime="2025-09-29 19:12:48.661102729 +0000 UTC m=+980.654410125"
Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.685798 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79" podStartSLOduration=5.403502585 podStartE2EDuration="35.685785182s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:14.912018676 +0000 UTC m=+946.905326072" lastFinishedPulling="2025-09-29 19:12:45.194301253 +0000 UTC m=+977.187608669" observedRunningTime="2025-09-29 19:12:48.683848589 +0000 UTC m=+980.677155995" watchObservedRunningTime="2025-09-29 19:12:48.685785182 +0000 UTC m=+980.679092578"
Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.718834 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb" podStartSLOduration=6.269573287 podStartE2EDuration="35.718815013s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:15.745542978 +0000 UTC m=+947.738850374" lastFinishedPulling="2025-09-29 19:12:45.194784704 +0000 UTC m=+977.188092100" observedRunningTime="2025-09-29 19:12:48.718406524 +0000 UTC m=+980.711713930" watchObservedRunningTime="2025-09-29 19:12:48.718815013 +0000 UTC m=+980.712122409"
Sep 29 19:12:48 crc kubenswrapper[4792]: I0929 19:12:48.773734 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7" podStartSLOduration=7.000587332 podStartE2EDuration="35.773711504s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:16.421223882 +0000 UTC m=+948.414531278" lastFinishedPulling="2025-09-29 19:12:45.194348034 +0000 UTC m=+977.187655450" observedRunningTime="2025-09-29 19:12:48.765866338 +0000 UTC m=+980.759173744" watchObservedRunningTime="2025-09-29 19:12:48.773711504 +0000 UTC m=+980.767018900"
Sep 29 19:12:49 crc kubenswrapper[4792]: I0929 19:12:49.594334 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x" event={"ID":"30ffe357-8b65-4481-95f2-7b2e13fd5676","Type":"ContainerStarted","Data":"bec51b24721367e432f9b92965daef80446a83330aa1317c1ef8580f0c510445"}
Sep 29 19:12:49 crc kubenswrapper[4792]: I0929 19:12:49.595542 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x"
Sep 29 19:12:49 crc kubenswrapper[4792]: I0929 19:12:49.598539 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k" event={"ID":"aa049eb9-e9cf-47c9-a06b-91e8c787e6c1","Type":"ContainerStarted","Data":"aa1bf8faa5625df27454b20cfc65028acabee128477e87265ca597cd0b84220a"}
Sep 29 19:12:49 crc kubenswrapper[4792]: I0929 19:12:49.601273 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65" event={"ID":"ee957b59-f5b6-4306-b6a7-4550199fe910","Type":"ContainerStarted","Data":"125dc51c47fc631848ea2d360206a01fe7128e2f199db05cc5f9b295b342610e"}
Sep 29 19:12:49 crc kubenswrapper[4792]: I0929 19:12:49.617349 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x" podStartSLOduration=3.670002554 podStartE2EDuration="36.61733425s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:15.800132089 +0000 UTC m=+947.793439485" lastFinishedPulling="2025-09-29 19:12:48.747463785 +0000 UTC m=+980.740771181" observedRunningTime="2025-09-29 19:12:49.612178995 +0000 UTC m=+981.605486401" watchObservedRunningTime="2025-09-29 19:12:49.61733425 +0000 UTC m=+981.610641646"
Sep 29 19:12:49 crc kubenswrapper[4792]: I0929 19:12:49.634220 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k" podStartSLOduration=3.716886676 podStartE2EDuration="36.634204819s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:15.833825295 +0000 UTC m=+947.827132691" lastFinishedPulling="2025-09-29 19:12:48.751143438 +0000 UTC m=+980.744450834" observedRunningTime="2025-09-29 19:12:49.629214597 +0000 UTC m=+981.622522003" watchObservedRunningTime="2025-09-29 19:12:49.634204819 +0000 UTC m=+981.627512215"
Sep 29 19:12:49 crc kubenswrapper[4792]: I0929 19:12:49.650064 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65" podStartSLOduration=3.71407283 podStartE2EDuration="36.650045924s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:15.741824479 +0000 UTC m=+947.735131875" lastFinishedPulling="2025-09-29 19:12:48.677797573 +0000 UTC m=+980.671104969" observedRunningTime="2025-09-29 19:12:49.645112973 +0000 UTC m=+981.638420369" watchObservedRunningTime="2025-09-29 19:12:49.650045924 +0000 UTC m=+981.643353320"
Sep 29 19:12:50 crc kubenswrapper[4792]: I0929 19:12:50.607311 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k"
Sep 29 19:12:50 crc kubenswrapper[4792]: I0929 19:12:50.608414 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.391603 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-vjkgm"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.422448 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-c6z65"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.439575 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-h9csw"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.479373 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-j2crr"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.536262 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fcg79"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.653109 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-d48vc"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.813800 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-nfk4r"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.837048 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-t4srt"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.891430 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-rrnhb"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.942797 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-9wbkb"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.944659 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-4dfhr"
Sep 29 19:12:53 crc kubenswrapper[4792]: I0929 19:12:53.976995 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zc87x"
Sep 29 19:12:54 crc kubenswrapper[4792]: I0929 19:12:54.044093 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-f8qhj"
Sep 29 19:12:54 crc kubenswrapper[4792]: I0929 19:12:54.125378 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x5h9k"
Sep 29 19:12:54 crc kubenswrapper[4792]: I0929 19:12:54.176981 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-4p4rl"
Sep 29 19:12:54 crc kubenswrapper[4792]: I0929 19:12:54.186683 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-frkgk"
Sep 29 19:12:55 crc kubenswrapper[4792]: I0929 19:12:55.220022 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-fn8fk"
Sep 29 19:12:55 crc kubenswrapper[4792]: I0929 19:12:55.470836 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-44cz7"
Sep 29 19:12:59 crc kubenswrapper[4792]: I0929 19:12:59.666287 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn" event={"ID":"5bc872e6-ce23-49cc-8ae7-bf92e4edda47","Type":"ContainerStarted","Data":"ebbef680ec17377819cdcc7aeedc48ae878ebefeea66926ff89bd64e5673c84c"}
Sep 29 19:12:59 crc kubenswrapper[4792]: I0929 19:12:59.684256 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-vxkfn" podStartSLOduration=2.349032879 podStartE2EDuration="45.68423378s" podCreationTimestamp="2025-09-29 19:12:14 +0000 UTC" firstStartedPulling="2025-09-29 19:12:16.141341702 +0000 UTC m=+948.134649098" lastFinishedPulling="2025-09-29 19:12:59.476542603 +0000 UTC m=+991.469849999" observedRunningTime="2025-09-29 19:12:59.680172379 +0000 UTC m=+991.673479775" watchObservedRunningTime="2025-09-29 19:12:59.68423378 +0000 UTC m=+991.677541196"
Sep 29 19:13:01 crc kubenswrapper[4792]: I0929 19:13:01.679518 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" event={"ID":"49160b59-f488-40f9-b23d-a3bccc3c2cb9","Type":"ContainerStarted","Data":"baa74a9d3382d099bd2d35309c3e6cfa63da1e301c556be9e91c5f5af0a8d71e"}
Sep 29 19:13:01 crc kubenswrapper[4792]: I0929 19:13:01.680610 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2"
Sep 29 19:13:01 crc kubenswrapper[4792]: I0929 19:13:01.695898 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2" podStartSLOduration=3.201454137 podStartE2EDuration="48.695839756s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:16.01116856 +0000 UTC m=+948.004475956" lastFinishedPulling="2025-09-29 19:13:01.505554179 +0000 UTC m=+993.498861575" observedRunningTime="2025-09-29 19:13:01.695051289 +0000 UTC m=+993.688358695" watchObservedRunningTime="2025-09-29 19:13:01.695839756 +0000 UTC m=+993.689147152"
Sep 29 19:13:02 crc kubenswrapper[4792]: I0929 19:13:02.017612 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 19:13:02 crc kubenswrapper[4792]: I0929 19:13:02.687708 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" event={"ID":"b9148442-b4dc-4926-920d-33c9a00172fa","Type":"ContainerStarted","Data":"dcfa109fa3f39391c1a3865eedf2272152b7ce77447b2aacafdaafbc90b20c66"}
Sep 29 19:13:02 crc kubenswrapper[4792]: I0929 19:13:02.687986 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98"
Sep 29 19:13:02 crc kubenswrapper[4792]: I0929 19:13:02.701409 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98" podStartSLOduration=2.988460186 podStartE2EDuration="49.701390314s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:15.733691633 +0000 UTC m=+947.726999029" lastFinishedPulling="2025-09-29 19:13:02.446621761 +0000 UTC m=+994.439929157" observedRunningTime="2025-09-29 19:13:02.700656107 +0000 UTC m=+994.693963513" watchObservedRunningTime="2025-09-29 19:13:02.701390314 +0000 UTC m=+994.694697720"
Sep 29 19:13:03 crc kubenswrapper[4792]: I0929 19:13:03.696213 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" event={"ID":"5ae40942-75a6-41a6-877a-4070bd348d32","Type":"ContainerStarted","Data":"088069beb39582460f386a23cf0cb4587d17508634612c226cdf62ca88c9d5bc"}
Sep 29 19:13:03 crc kubenswrapper[4792]: I0929 19:13:03.696780 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp"
Sep 29 19:13:03 crc kubenswrapper[4792]: I0929 19:13:03.715864 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp" podStartSLOduration=3.00368746 podStartE2EDuration="50.7158404s" podCreationTimestamp="2025-09-29 19:12:13 +0000 UTC" firstStartedPulling="2025-09-29 19:12:15.744047728 +0000 UTC m=+947.737355124" lastFinishedPulling="2025-09-29 19:13:03.456200668 +0000 UTC m=+995.449508064" observedRunningTime="2025-09-29 19:13:03.711023472 +0000 UTC m=+995.704330888" watchObservedRunningTime="2025-09-29 19:13:03.7158404 +0000 UTC m=+995.709147796"
Sep 29 19:13:11 crc kubenswrapper[4792]: I0929 19:13:11.959337 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:13:11 crc kubenswrapper[4792]: I0929 19:13:11.959940 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:13:13 crc kubenswrapper[4792]: I0929 19:13:13.855745 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-swj98"
Sep 29 19:13:13 crc kubenswrapper[4792]: I0929 19:13:13.868656 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-zzmf2"
Sep 29 19:13:14 crc kubenswrapper[4792]: I0929 19:13:14.226642 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-m88jp"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.153829 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pl8rd"]
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.156946 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.161014 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.163237 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.163257 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.163303 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-wfv9z"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.182089 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pl8rd"]
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.222506 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mqhwx"]
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.224396 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.227872 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.252417 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mqhwx"]
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.334214 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mqhwx\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.334500 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-config\") pod \"dnsmasq-dns-78dd6ddcc-mqhwx\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.334644 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k88v9\" (UniqueName: \"kubernetes.io/projected/7481394e-6d8e-4d35-be11-70e33e7d775b-kube-api-access-k88v9\") pod \"dnsmasq-dns-78dd6ddcc-mqhwx\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.334764 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/089abc08-05e8-4e45-a751-cbb48d3fbf6b-config\") pod \"dnsmasq-dns-675f4bcbfc-pl8rd\" (UID: \"089abc08-05e8-4e45-a751-cbb48d3fbf6b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.334915 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpzsx\" (UniqueName: \"kubernetes.io/projected/089abc08-05e8-4e45-a751-cbb48d3fbf6b-kube-api-access-lpzsx\") pod \"dnsmasq-dns-675f4bcbfc-pl8rd\" (UID: \"089abc08-05e8-4e45-a751-cbb48d3fbf6b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.435948 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mqhwx\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.436317 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-config\") pod \"dnsmasq-dns-78dd6ddcc-mqhwx\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.436447 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k88v9\" (UniqueName: \"kubernetes.io/projected/7481394e-6d8e-4d35-be11-70e33e7d775b-kube-api-access-k88v9\") pod \"dnsmasq-dns-78dd6ddcc-mqhwx\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.436548 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/089abc08-05e8-4e45-a751-cbb48d3fbf6b-config\") pod \"dnsmasq-dns-675f4bcbfc-pl8rd\" (UID: \"089abc08-05e8-4e45-a751-cbb48d3fbf6b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.436646 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpzsx\" (UniqueName: \"kubernetes.io/projected/089abc08-05e8-4e45-a751-cbb48d3fbf6b-kube-api-access-lpzsx\") pod \"dnsmasq-dns-675f4bcbfc-pl8rd\" (UID: \"089abc08-05e8-4e45-a751-cbb48d3fbf6b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.436838 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mqhwx\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.436962 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-config\") pod \"dnsmasq-dns-78dd6ddcc-mqhwx\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.437479 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/089abc08-05e8-4e45-a751-cbb48d3fbf6b-config\") pod \"dnsmasq-dns-675f4bcbfc-pl8rd\" (UID: \"089abc08-05e8-4e45-a751-cbb48d3fbf6b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.456996 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpzsx\" (UniqueName: \"kubernetes.io/projected/089abc08-05e8-4e45-a751-cbb48d3fbf6b-kube-api-access-lpzsx\") pod \"dnsmasq-dns-675f4bcbfc-pl8rd\" (UID: \"089abc08-05e8-4e45-a751-cbb48d3fbf6b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.465943 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k88v9\" (UniqueName: \"kubernetes.io/projected/7481394e-6d8e-4d35-be11-70e33e7d775b-kube-api-access-k88v9\") pod \"dnsmasq-dns-78dd6ddcc-mqhwx\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.474564 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.537105 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx"
Sep 29 19:13:32 crc kubenswrapper[4792]: I0929 19:13:32.919183 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pl8rd"]
Sep 29 19:13:33 crc kubenswrapper[4792]: I0929 19:13:33.005484 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mqhwx"]
Sep 29 19:13:33 crc kubenswrapper[4792]: W0929 19:13:33.014757 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7481394e_6d8e_4d35_be11_70e33e7d775b.slice/crio-8acd6d4adfb716cec233eacefeee7540a424ec91b237a11a1b8db59d056d6a15 WatchSource:0}: Error finding container 8acd6d4adfb716cec233eacefeee7540a424ec91b237a11a1b8db59d056d6a15: Status 404 returned error can't find the container with id 8acd6d4adfb716cec233eacefeee7540a424ec91b237a11a1b8db59d056d6a15
Sep 29 19:13:33 crc kubenswrapper[4792]: I0929 19:13:33.890958 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx" event={"ID":"7481394e-6d8e-4d35-be11-70e33e7d775b","Type":"ContainerStarted","Data":"8acd6d4adfb716cec233eacefeee7540a424ec91b237a11a1b8db59d056d6a15"}
Sep 29 19:13:33 crc kubenswrapper[4792]: I0929 19:13:33.892355 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd" event={"ID":"089abc08-05e8-4e45-a751-cbb48d3fbf6b","Type":"ContainerStarted","Data":"86f3bd978df47baaf9147eb2d1c57bef2cc14fafd3cbe83d9c35f3284df86453"}
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.213377 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pl8rd"]
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.241194 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hwsf9"]
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.266532 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.312138 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hwsf9"]
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.378165 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-dns-svc\") pod \"dnsmasq-dns-666b6646f7-hwsf9\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.378227 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-config\") pod \"dnsmasq-dns-666b6646f7-hwsf9\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.378358 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6rnk\" (UniqueName: \"kubernetes.io/projected/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-kube-api-access-t6rnk\") pod \"dnsmasq-dns-666b6646f7-hwsf9\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.482948 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6rnk\" (UniqueName: \"kubernetes.io/projected/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-kube-api-access-t6rnk\") pod \"dnsmasq-dns-666b6646f7-hwsf9\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.483037 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-dns-svc\") pod \"dnsmasq-dns-666b6646f7-hwsf9\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.483060 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-config\") pod \"dnsmasq-dns-666b6646f7-hwsf9\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.484440 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-config\") pod \"dnsmasq-dns-666b6646f7-hwsf9\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.497551 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-dns-svc\") pod \"dnsmasq-dns-666b6646f7-hwsf9\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.528734 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6rnk\" (UniqueName: \"kubernetes.io/projected/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-kube-api-access-t6rnk\") pod \"dnsmasq-dns-666b6646f7-hwsf9\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.612199 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-hwsf9"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.676303 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mqhwx"]
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.708278 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-n5gmr"]
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.709536 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.733898 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-n5gmr"]
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.796200 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-n5gmr\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.796246 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-config\") pod \"dnsmasq-dns-57d769cc4f-n5gmr\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.796267 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzvls\" (UniqueName: \"kubernetes.io/projected/79e97193-edc8-43b3-a482-b3e3a0354cb5-kube-api-access-mzvls\") pod \"dnsmasq-dns-57d769cc4f-n5gmr\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.898979 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-n5gmr\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.899035 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-config\") pod \"dnsmasq-dns-57d769cc4f-n5gmr\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.899057 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzvls\" (UniqueName: \"kubernetes.io/projected/79e97193-edc8-43b3-a482-b3e3a0354cb5-kube-api-access-mzvls\") pod \"dnsmasq-dns-57d769cc4f-n5gmr\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.900127 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-n5gmr\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.900177 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-config\") pod \"dnsmasq-dns-57d769cc4f-n5gmr\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:34 crc kubenswrapper[4792]: I0929 19:13:34.937653 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzvls\" (UniqueName: \"kubernetes.io/projected/79e97193-edc8-43b3-a482-b3e3a0354cb5-kube-api-access-mzvls\") pod \"dnsmasq-dns-57d769cc4f-n5gmr\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.127507 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.408352 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hwsf9"]
Sep 29 19:13:35 crc kubenswrapper[4792]: W0929 19:13:35.419222 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda70ef7c9_6099_4acc_9a08_cc4a74aa17bb.slice/crio-818e10797eb8377db26910649781f06c9466be013324495e51d4807e8cfe543f WatchSource:0}: Error finding container 818e10797eb8377db26910649781f06c9466be013324495e51d4807e8cfe543f: Status 404 returned error can't find the container with id 818e10797eb8377db26910649781f06c9466be013324495e51d4807e8cfe543f
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.470748 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.472322 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.477276 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.477436 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.477661 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.478161 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-dwb5k"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.478293 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.478823 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.479843 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.480308 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.525880 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8mjt\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-kube-api-access-f8mjt\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.525926 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.525950 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.525966 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.526011 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.526039 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.526064 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.526088 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-config-data\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.526120 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.526146 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.526166 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.611527 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-n5gmr"]
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.637958 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-config-data\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638017 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638037 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638058 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638086 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8mjt\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-kube-api-access-f8mjt\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638111 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638132 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638148 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638169 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638185 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638211 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.638781 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.639053 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.639543 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.639892 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-config-data\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.641159 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.642978 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.660381 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8mjt\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-kube-api-access-f8mjt\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.670884 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.671001 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.672371 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.684912 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.687804 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.802343 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.883303 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.884895 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.889093 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.889819 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.890086 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-whhj6"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.890211 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.890379 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.891052 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.892172 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.917804 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.928167 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-hwsf9" event={"ID":"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb","Type":"ContainerStarted","Data":"818e10797eb8377db26910649781f06c9466be013324495e51d4807e8cfe543f"}
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.934663 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr" event={"ID":"79e97193-edc8-43b3-a482-b3e3a0354cb5","Type":"ContainerStarted","Data":"c5fa26ff71f805e2c59f3d61b73a629dfe4944fe7b54217988421554f451b24c"}
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.942783 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.942837 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.942856 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.942908 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.942941 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.942973 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/62bc84b7-9b21-447c-b1c3-21c4f178ba26-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.942990 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.943011 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wznl\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-kube-api-access-4wznl\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.943033 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.943063 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:35 crc kubenswrapper[4792]: I0929 19:13:35.943084 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/62bc84b7-9b21-447c-b1c3-21c4f178ba26-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044382 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044717 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044750 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/62bc84b7-9b21-447c-b1c3-21c4f178ba26-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044769 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044791 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wznl\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-kube-api-access-4wznl\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044818 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044851 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044887 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/62bc84b7-9b21-447c-b1c3-21c4f178ba26-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044910 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044939 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.044955 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.046547 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.046978 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.047189 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.048249 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.049015 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/62bc84b7-9b21-447c-b1c3-21c4f178ba26-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.052272 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/62bc84b7-9b21-447c-b1c3-21c4f178ba26-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.052730 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.053619 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.059653 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.059912 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.063824 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wznl\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-kube-api-access-4wznl\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.075404 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.211437 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.393852 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 19:13:36 crc kubenswrapper[4792]: W0929 19:13:36.413594 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf5405ae_97dd_404d_9b0c_4d0faaf961cb.slice/crio-868e208c8630b90861e90343b66e1785f37474c0424779d9f0f558c4c204da2d WatchSource:0}: Error finding container 868e208c8630b90861e90343b66e1785f37474c0424779d9f0f558c4c204da2d: Status 404 returned error can't find the container with id 868e208c8630b90861e90343b66e1785f37474c0424779d9f0f558c4c204da2d
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.723539 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 19:13:36 crc kubenswrapper[4792]: W0929 19:13:36.763114 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62bc84b7_9b21_447c_b1c3_21c4f178ba26.slice/crio-062e455f1a835d3c9c0ecb7089ad18d69a0b52c3c14dd5286ef2350dcb9c1c57 WatchSource:0}: Error finding container 062e455f1a835d3c9c0ecb7089ad18d69a0b52c3c14dd5286ef2350dcb9c1c57: Status 404 returned error can't find the container with id 062e455f1a835d3c9c0ecb7089ad18d69a0b52c3c14dd5286ef2350dcb9c1c57
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.944572 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cf5405ae-97dd-404d-9b0c-4d0faaf961cb","Type":"ContainerStarted","Data":"868e208c8630b90861e90343b66e1785f37474c0424779d9f0f558c4c204da2d"}
Sep 29 19:13:36 crc kubenswrapper[4792]: I0929 19:13:36.946230 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"62bc84b7-9b21-447c-b1c3-21c4f178ba26","Type":"ContainerStarted","Data":"062e455f1a835d3c9c0ecb7089ad18d69a0b52c3c14dd5286ef2350dcb9c1c57"}
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.520227 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.522407 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.530173 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.531440 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.532163 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.532245 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.542299 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-tvqtb"
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.548102 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.552996 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.580046 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.583841 4792 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.586013 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-mgcgx" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.590471 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.590673 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.590725 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.594109 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635651 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635699 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/5c8592a0-091a-48ce-996c-f42bbdaf240c-secrets\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635717 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c8592a0-091a-48ce-996c-f42bbdaf240c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635735 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635753 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635830 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5c8592a0-091a-48ce-996c-f42bbdaf240c-config-data-default\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635881 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5c8592a0-091a-48ce-996c-f42bbdaf240c-config-data-generated\") pod \"openstack-galera-0\" (UID: 
\"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635902 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c8592a0-091a-48ce-996c-f42bbdaf240c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635923 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635949 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635977 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5c8592a0-091a-48ce-996c-f42bbdaf240c-kolla-config\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.635998 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm86n\" (UniqueName: \"kubernetes.io/projected/5c8592a0-091a-48ce-996c-f42bbdaf240c-kube-api-access-qm86n\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.636030 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.636052 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.636068 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.636122 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c8592a0-091a-48ce-996c-f42bbdaf240c-combined-ca-bundle\") pod \"openstack-galera-0\" 
(UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.636147 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.636167 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5vdb\" (UniqueName: \"kubernetes.io/projected/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-kube-api-access-j5vdb\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739656 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c8592a0-091a-48ce-996c-f42bbdaf240c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739713 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739735 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5vdb\" (UniqueName: \"kubernetes.io/projected/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-kube-api-access-j5vdb\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739760 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739778 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/5c8592a0-091a-48ce-996c-f42bbdaf240c-secrets\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739792 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c8592a0-091a-48ce-996c-f42bbdaf240c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739807 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 
19:13:38.739838 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739879 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5c8592a0-091a-48ce-996c-f42bbdaf240c-config-data-default\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739913 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5c8592a0-091a-48ce-996c-f42bbdaf240c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739930 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c8592a0-091a-48ce-996c-f42bbdaf240c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739945 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739959 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739983 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5c8592a0-091a-48ce-996c-f42bbdaf240c-kolla-config\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.739998 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm86n\" (UniqueName: \"kubernetes.io/projected/5c8592a0-091a-48ce-996c-f42bbdaf240c-kube-api-access-qm86n\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.740021 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.740040 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.740054 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.740855 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.740915 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.741201 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5c8592a0-091a-48ce-996c-f42bbdaf240c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.741301 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5c8592a0-091a-48ce-996c-f42bbdaf240c-config-data-default\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.742473 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.742773 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5c8592a0-091a-48ce-996c-f42bbdaf240c-kolla-config\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.745277 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.748202 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: 
\"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.757544 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.758264 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c8592a0-091a-48ce-996c-f42bbdaf240c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.760056 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c8592a0-091a-48ce-996c-f42bbdaf240c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.761870 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.777106 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.787827 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c8592a0-091a-48ce-996c-f42bbdaf240c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.795322 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.795417 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5vdb\" (UniqueName: \"kubernetes.io/projected/9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4-kube-api-access-j5vdb\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.801430 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.804568 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: 
\"kubernetes.io/secret/5c8592a0-091a-48ce-996c-f42bbdaf240c-secrets\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.813364 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.813658 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm86n\" (UniqueName: \"kubernetes.io/projected/5c8592a0-091a-48ce-996c-f42bbdaf240c-kube-api-access-qm86n\") pod \"openstack-galera-0\" (UID: \"5c8592a0-091a-48ce-996c-f42bbdaf240c\") " pod="openstack/openstack-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.863961 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 19:13:38 crc kubenswrapper[4792]: I0929 19:13:38.917563 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.082404 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.083238 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.083312 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.089397 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-thlg2" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.089545 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.089679 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.160901 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mc6hw\" (UniqueName: \"kubernetes.io/projected/ea8bd43c-bb10-450a-b564-c7b4247d1252-kube-api-access-mc6hw\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.160989 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ea8bd43c-bb10-450a-b564-c7b4247d1252-config-data\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.161037 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ea8bd43c-bb10-450a-b564-c7b4247d1252-kolla-config\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.161083 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/ea8bd43c-bb10-450a-b564-c7b4247d1252-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.161100 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8bd43c-bb10-450a-b564-c7b4247d1252-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.267165 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea8bd43c-bb10-450a-b564-c7b4247d1252-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.267691 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8bd43c-bb10-450a-b564-c7b4247d1252-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.267907 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mc6hw\" (UniqueName: \"kubernetes.io/projected/ea8bd43c-bb10-450a-b564-c7b4247d1252-kube-api-access-mc6hw\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.268085 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ea8bd43c-bb10-450a-b564-c7b4247d1252-config-data\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.268196 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ea8bd43c-bb10-450a-b564-c7b4247d1252-kolla-config\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.269174 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ea8bd43c-bb10-450a-b564-c7b4247d1252-kolla-config\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.280455 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ea8bd43c-bb10-450a-b564-c7b4247d1252-config-data\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.291267 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea8bd43c-bb10-450a-b564-c7b4247d1252-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.300983 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8bd43c-bb10-450a-b564-c7b4247d1252-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.304295 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mc6hw\" (UniqueName: \"kubernetes.io/projected/ea8bd43c-bb10-450a-b564-c7b4247d1252-kube-api-access-mc6hw\") pod \"memcached-0\" (UID: \"ea8bd43c-bb10-450a-b564-c7b4247d1252\") " pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.437679 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.670776 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.779649 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 19:13:39 crc kubenswrapper[4792]: W0929 19:13:39.799738 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a26454d_9ce8_4591_a7dd_6f8d4df5e3a4.slice/crio-d4ebec168c8394b59304d0959fd81ffb450f716873b5e824d55c553fca20782b WatchSource:0}: Error finding container d4ebec168c8394b59304d0959fd81ffb450f716873b5e824d55c553fca20782b: Status 404 returned error can't find the container with id d4ebec168c8394b59304d0959fd81ffb450f716873b5e824d55c553fca20782b Sep 29 19:13:39 crc kubenswrapper[4792]: I0929 19:13:39.988315 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 19:13:40 crc kubenswrapper[4792]: W0929 19:13:40.001737 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea8bd43c_bb10_450a_b564_c7b4247d1252.slice/crio-10eb456e740e28163a7bb2244fed9e7b42e41d6e9ff7e8df5a2fc59dfe1c09aa WatchSource:0}: Error finding container 10eb456e740e28163a7bb2244fed9e7b42e41d6e9ff7e8df5a2fc59dfe1c09aa: Status 404 returned error can't find the container with id 10eb456e740e28163a7bb2244fed9e7b42e41d6e9ff7e8df5a2fc59dfe1c09aa Sep 29 19:13:40 crc kubenswrapper[4792]: I0929 19:13:40.042794 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4","Type":"ContainerStarted","Data":"d4ebec168c8394b59304d0959fd81ffb450f716873b5e824d55c553fca20782b"} Sep 29 19:13:40 crc kubenswrapper[4792]: I0929 19:13:40.046294 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5c8592a0-091a-48ce-996c-f42bbdaf240c","Type":"ContainerStarted","Data":"836528cec448ff34d6df18c21ab3e927ae6d1624407677145f07b9bed46174ad"} Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.051887 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.053358 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.055787 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-hw982" Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.068957 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.073666 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ea8bd43c-bb10-450a-b564-c7b4247d1252","Type":"ContainerStarted","Data":"10eb456e740e28163a7bb2244fed9e7b42e41d6e9ff7e8df5a2fc59dfe1c09aa"} Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.112594 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wg66\" (UniqueName: \"kubernetes.io/projected/a90baac4-d8fc-472d-992f-c1a36805b12e-kube-api-access-7wg66\") pod \"kube-state-metrics-0\" (UID: \"a90baac4-d8fc-472d-992f-c1a36805b12e\") " pod="openstack/kube-state-metrics-0" Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.214011 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wg66\" (UniqueName: \"kubernetes.io/projected/a90baac4-d8fc-472d-992f-c1a36805b12e-kube-api-access-7wg66\") pod \"kube-state-metrics-0\" (UID: \"a90baac4-d8fc-472d-992f-c1a36805b12e\") " pod="openstack/kube-state-metrics-0" Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.241106 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wg66\" (UniqueName: \"kubernetes.io/projected/a90baac4-d8fc-472d-992f-c1a36805b12e-kube-api-access-7wg66\") pod \"kube-state-metrics-0\" (UID: \"a90baac4-d8fc-472d-992f-c1a36805b12e\") " pod="openstack/kube-state-metrics-0" Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.454576 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.961571 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:13:41 crc kubenswrapper[4792]: I0929 19:13:41.961884 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.373621 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.375500 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.380114 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.380333 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.380446 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-zpzms" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.381385 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.381791 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.388615 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.408060 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da980a6f-8dcf-4d5b-a972-fc646865967c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.408094 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.408143 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/da980a6f-8dcf-4d5b-a972-fc646865967c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.408207 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da980a6f-8dcf-4d5b-a972-fc646865967c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.408232 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/da980a6f-8dcf-4d5b-a972-fc646865967c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.408255 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6p6p\" (UniqueName: \"kubernetes.io/projected/da980a6f-8dcf-4d5b-a972-fc646865967c-kube-api-access-w6p6p\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.408279 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/da980a6f-8dcf-4d5b-a972-fc646865967c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.408299 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da980a6f-8dcf-4d5b-a972-fc646865967c-config\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.509781 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/da980a6f-8dcf-4d5b-a972-fc646865967c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.509837 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da980a6f-8dcf-4d5b-a972-fc646865967c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.509872 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/da980a6f-8dcf-4d5b-a972-fc646865967c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.509898 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6p6p\" (UniqueName: \"kubernetes.io/projected/da980a6f-8dcf-4d5b-a972-fc646865967c-kube-api-access-w6p6p\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.509922 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/da980a6f-8dcf-4d5b-a972-fc646865967c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.509944 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da980a6f-8dcf-4d5b-a972-fc646865967c-config\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.509988 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da980a6f-8dcf-4d5b-a972-fc646865967c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.510007 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 
19:13:45.510316 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.511723 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da980a6f-8dcf-4d5b-a972-fc646865967c-config\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.512009 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/da980a6f-8dcf-4d5b-a972-fc646865967c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.512731 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da980a6f-8dcf-4d5b-a972-fc646865967c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.522730 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/da980a6f-8dcf-4d5b-a972-fc646865967c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.523025 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da980a6f-8dcf-4d5b-a972-fc646865967c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.523716 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/da980a6f-8dcf-4d5b-a972-fc646865967c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.525685 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6p6p\" (UniqueName: \"kubernetes.io/projected/da980a6f-8dcf-4d5b-a972-fc646865967c-kube-api-access-w6p6p\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.534321 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"da980a6f-8dcf-4d5b-a972-fc646865967c\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.699449 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.844178 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-zvckm"] Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.845069 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zvckm" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.850331 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.850662 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.851155 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-l6zhk" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.862725 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zvckm"] Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.914278 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-mh2vn"] Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.915607 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/321cc22b-3e6d-429f-aba5-d69c973d889e-ovn-controller-tls-certs\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.915774 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.916225 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/321cc22b-3e6d-429f-aba5-d69c973d889e-var-run\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.916342 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/321cc22b-3e6d-429f-aba5-d69c973d889e-scripts\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.916484 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321cc22b-3e6d-429f-aba5-d69c973d889e-combined-ca-bundle\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.916633 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/321cc22b-3e6d-429f-aba5-d69c973d889e-var-log-ovn\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.916765 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h75rq\" 
(UniqueName: \"kubernetes.io/projected/321cc22b-3e6d-429f-aba5-d69c973d889e-kube-api-access-h75rq\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.916994 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/321cc22b-3e6d-429f-aba5-d69c973d889e-var-run-ovn\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:45 crc kubenswrapper[4792]: I0929 19:13:45.973830 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-mh2vn"] Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018199 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321cc22b-3e6d-429f-aba5-d69c973d889e-combined-ca-bundle\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018272 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/321cc22b-3e6d-429f-aba5-d69c973d889e-var-log-ovn\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018295 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-var-lib\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018324 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h75rq\" (UniqueName: \"kubernetes.io/projected/321cc22b-3e6d-429f-aba5-d69c973d889e-kube-api-access-h75rq\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018355 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/321cc22b-3e6d-429f-aba5-d69c973d889e-var-run-ovn\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018374 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5t88\" (UniqueName: \"kubernetes.io/projected/904d363d-f0dc-4318-9f28-d06e374a4838-kube-api-access-n5t88\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018399 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-etc-ovs\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018414 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/904d363d-f0dc-4318-9f28-d06e374a4838-scripts\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018447 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/321cc22b-3e6d-429f-aba5-d69c973d889e-ovn-controller-tls-certs\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018466 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-var-run\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018579 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-var-log\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018631 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/321cc22b-3e6d-429f-aba5-d69c973d889e-var-run\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018653 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/321cc22b-3e6d-429f-aba5-d69c973d889e-scripts\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018715 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/321cc22b-3e6d-429f-aba5-d69c973d889e-var-log-ovn\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018763 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/321cc22b-3e6d-429f-aba5-d69c973d889e-var-run-ovn\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.018933 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/321cc22b-3e6d-429f-aba5-d69c973d889e-var-run\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.021756 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321cc22b-3e6d-429f-aba5-d69c973d889e-combined-ca-bundle\") pod \"ovn-controller-zvckm\" (UID: 
\"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.022071 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/321cc22b-3e6d-429f-aba5-d69c973d889e-scripts\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.026108 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/321cc22b-3e6d-429f-aba5-d69c973d889e-ovn-controller-tls-certs\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.045349 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h75rq\" (UniqueName: \"kubernetes.io/projected/321cc22b-3e6d-429f-aba5-d69c973d889e-kube-api-access-h75rq\") pod \"ovn-controller-zvckm\" (UID: \"321cc22b-3e6d-429f-aba5-d69c973d889e\") " pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.119664 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-var-lib\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.119764 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5t88\" (UniqueName: \"kubernetes.io/projected/904d363d-f0dc-4318-9f28-d06e374a4838-kube-api-access-n5t88\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.119793 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-etc-ovs\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.119813 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/904d363d-f0dc-4318-9f28-d06e374a4838-scripts\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.119896 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-var-run\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.119961 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-var-log\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.120186 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-var-log\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.121245 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-var-run\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.121362 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-etc-ovs\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.121366 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/904d363d-f0dc-4318-9f28-d06e374a4838-var-lib\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.125896 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/904d363d-f0dc-4318-9f28-d06e374a4838-scripts\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.155596 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5t88\" (UniqueName: \"kubernetes.io/projected/904d363d-f0dc-4318-9f28-d06e374a4838-kube-api-access-n5t88\") pod \"ovn-controller-ovs-mh2vn\" (UID: \"904d363d-f0dc-4318-9f28-d06e374a4838\") " pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.177074 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zvckm" Sep 29 19:13:46 crc kubenswrapper[4792]: I0929 19:13:46.275992 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.053387 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.055399 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.058520 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-f4wj5" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.058798 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.058975 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.059148 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.068410 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.155895 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.155943 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.155975 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c75gs\" (UniqueName: \"kubernetes.io/projected/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-kube-api-access-c75gs\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.156020 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.156094 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.156110 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.156166 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-scripts\") pod \"ovsdbserver-sb-0\" (UID: 
\"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.156197 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-config\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.257714 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.257760 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-config\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.257806 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.257821 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.257861 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c75gs\" (UniqueName: \"kubernetes.io/projected/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-kube-api-access-c75gs\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.257902 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.257932 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.257947 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.258783 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.258789 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-config\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.258916 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.259765 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.263698 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.268674 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.271698 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.276913 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c75gs\" (UniqueName: \"kubernetes.io/projected/272430d7-51cd-4f45-bfdd-73ed83ab0bc2-kube-api-access-c75gs\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.298350 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"272430d7-51cd-4f45-bfdd-73ed83ab0bc2\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:13:48 crc kubenswrapper[4792]: I0929 19:13:48.378005 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 19:14:00 crc kubenswrapper[4792]: E0929 19:14:00.014732 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Sep 29 19:14:00 crc kubenswrapper[4792]: E0929 19:14:00.015406 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f8mjt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(cf5405ae-97dd-404d-9b0c-4d0faaf961cb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:14:00 crc kubenswrapper[4792]: E0929 19:14:00.016599 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/rabbitmq-server-0" podUID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" Sep 29 19:14:00 crc kubenswrapper[4792]: E0929 19:14:00.029081 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Sep 29 19:14:00 crc kubenswrapper[4792]: E0929 19:14:00.029304 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4wznl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(62bc84b7-9b21-447c-b1c3-21c4f178ba26): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:14:00 crc kubenswrapper[4792]: E0929 19:14:00.030598 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context 
canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" Sep 29 19:14:00 crc kubenswrapper[4792]: E0929 19:14:00.230287 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" Sep 29 19:14:00 crc kubenswrapper[4792]: E0929 19:14:00.233132 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" Sep 29 19:14:04 crc kubenswrapper[4792]: E0929 19:14:04.460887 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Sep 29 19:14:04 crc kubenswrapper[4792]: E0929 19:14:04.461304 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n97h69h657h54fh545h6h58dhfh568h5cbhf7h569hdbh68fh77h698h5d9h55bh564hddhb5hcbh655hbfh649h5cdh654hf4h66dh54dh684h56cq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mc6hw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 
},Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(ea8bd43c-bb10-450a-b564-c7b4247d1252): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:14:04 crc kubenswrapper[4792]: E0929 19:14:04.462829 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="ea8bd43c-bb10-450a-b564-c7b4247d1252" Sep 29 19:14:04 crc kubenswrapper[4792]: I0929 19:14:04.908214 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zvckm"] Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.265029 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="ea8bd43c-bb10-450a-b564-c7b4247d1252" Sep 29 19:14:05 crc kubenswrapper[4792]: I0929 19:14:05.354917 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-mh2vn"] Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.376665 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.376844 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k88v9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-mqhwx_openstack(7481394e-6d8e-4d35-be11-70e33e7d775b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.378270 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx" podUID="7481394e-6d8e-4d35-be11-70e33e7d775b" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.411259 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.411468 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lpzsx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-pl8rd_openstack(089abc08-05e8-4e45-a751-cbb48d3fbf6b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.412908 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd" podUID="089abc08-05e8-4e45-a751-cbb48d3fbf6b" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.443553 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.444492 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t6rnk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-hwsf9_openstack(a70ef7c9-6099-4acc-9a08-cc4a74aa17bb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.445800 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-hwsf9" podUID="a70ef7c9-6099-4acc-9a08-cc4a74aa17bb" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.446047 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.446221 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mzvls,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-n5gmr_openstack(79e97193-edc8-43b3-a482-b3e3a0354cb5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:14:05 crc kubenswrapper[4792]: E0929 19:14:05.447918 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr" podUID="79e97193-edc8-43b3-a482-b3e3a0354cb5" Sep 29 19:14:05 crc kubenswrapper[4792]: I0929 19:14:05.871623 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 19:14:05 crc kubenswrapper[4792]: W0929 19:14:05.886750 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda90baac4_d8fc_472d_992f_c1a36805b12e.slice/crio-3b7e18102a99e2f53e8b9df540a041b536c59902c9446bcaf71769c7f3c9ce9c WatchSource:0}: Error finding container 3b7e18102a99e2f53e8b9df540a041b536c59902c9446bcaf71769c7f3c9ce9c: Status 404 returned error can't find the container with id 3b7e18102a99e2f53e8b9df540a041b536c59902c9446bcaf71769c7f3c9ce9c Sep 29 19:14:05 crc kubenswrapper[4792]: I0929 19:14:05.931819 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 19:14:05 crc kubenswrapper[4792]: W0929 19:14:05.937044 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod272430d7_51cd_4f45_bfdd_73ed83ab0bc2.slice/crio-1fa4d28752f9a50a68b5843bbd183587e919f7828116bc6f79b45311291c57f8 WatchSource:0}: Error finding container 1fa4d28752f9a50a68b5843bbd183587e919f7828116bc6f79b45311291c57f8: Status 404 returned error can't find the 
container with id 1fa4d28752f9a50a68b5843bbd183587e919f7828116bc6f79b45311291c57f8 Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.053445 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.272400 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zvckm" event={"ID":"321cc22b-3e6d-429f-aba5-d69c973d889e","Type":"ContainerStarted","Data":"329b4eeb2209f86414cdcc3b962956cb45fdb6ce64dc6440ff008be714530a69"} Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.274059 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mh2vn" event={"ID":"904d363d-f0dc-4318-9f28-d06e374a4838","Type":"ContainerStarted","Data":"64c1b8242bc53a25773a344da0db95b0c13dc6c13f407f7525c680f9c22e10f4"} Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.275656 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"272430d7-51cd-4f45-bfdd-73ed83ab0bc2","Type":"ContainerStarted","Data":"1fa4d28752f9a50a68b5843bbd183587e919f7828116bc6f79b45311291c57f8"} Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.277436 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4","Type":"ContainerStarted","Data":"2ef9e3a42f167fa3d91b8c2147833e5ecfc8411ab72b290c66c02ff287637c62"} Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.279686 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5c8592a0-091a-48ce-996c-f42bbdaf240c","Type":"ContainerStarted","Data":"fd8bf6a0402ed68d572abe02892d5ac4544cf0d983e59b76acaca91b7d570312"} Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.283711 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"da980a6f-8dcf-4d5b-a972-fc646865967c","Type":"ContainerStarted","Data":"23e790701a2f3182e19855f0e7e8a305ed1d23ce8e0b2ab1b548181ded43e9f1"} Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.286879 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a90baac4-d8fc-472d-992f-c1a36805b12e","Type":"ContainerStarted","Data":"3b7e18102a99e2f53e8b9df540a041b536c59902c9446bcaf71769c7f3c9ce9c"} Sep 29 19:14:06 crc kubenswrapper[4792]: E0929 19:14:06.288556 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-hwsf9" podUID="a70ef7c9-6099-4acc-9a08-cc4a74aa17bb" Sep 29 19:14:06 crc kubenswrapper[4792]: E0929 19:14:06.289637 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr" podUID="79e97193-edc8-43b3-a482-b3e3a0354cb5" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.772804 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.777772 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.874730 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-dns-svc\") pod \"7481394e-6d8e-4d35-be11-70e33e7d775b\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.875141 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k88v9\" (UniqueName: \"kubernetes.io/projected/7481394e-6d8e-4d35-be11-70e33e7d775b-kube-api-access-k88v9\") pod \"7481394e-6d8e-4d35-be11-70e33e7d775b\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.875160 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/089abc08-05e8-4e45-a751-cbb48d3fbf6b-config\") pod \"089abc08-05e8-4e45-a751-cbb48d3fbf6b\" (UID: \"089abc08-05e8-4e45-a751-cbb48d3fbf6b\") " Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.875515 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/089abc08-05e8-4e45-a751-cbb48d3fbf6b-config" (OuterVolumeSpecName: "config") pod "089abc08-05e8-4e45-a751-cbb48d3fbf6b" (UID: "089abc08-05e8-4e45-a751-cbb48d3fbf6b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.875549 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-config\") pod \"7481394e-6d8e-4d35-be11-70e33e7d775b\" (UID: \"7481394e-6d8e-4d35-be11-70e33e7d775b\") " Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.875597 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpzsx\" (UniqueName: \"kubernetes.io/projected/089abc08-05e8-4e45-a751-cbb48d3fbf6b-kube-api-access-lpzsx\") pod \"089abc08-05e8-4e45-a751-cbb48d3fbf6b\" (UID: \"089abc08-05e8-4e45-a751-cbb48d3fbf6b\") " Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.875694 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-config" (OuterVolumeSpecName: "config") pod "7481394e-6d8e-4d35-be11-70e33e7d775b" (UID: "7481394e-6d8e-4d35-be11-70e33e7d775b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.876216 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7481394e-6d8e-4d35-be11-70e33e7d775b" (UID: "7481394e-6d8e-4d35-be11-70e33e7d775b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.876573 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/089abc08-05e8-4e45-a751-cbb48d3fbf6b-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.876591 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.876601 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7481394e-6d8e-4d35-be11-70e33e7d775b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.882892 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/089abc08-05e8-4e45-a751-cbb48d3fbf6b-kube-api-access-lpzsx" (OuterVolumeSpecName: "kube-api-access-lpzsx") pod "089abc08-05e8-4e45-a751-cbb48d3fbf6b" (UID: "089abc08-05e8-4e45-a751-cbb48d3fbf6b"). InnerVolumeSpecName "kube-api-access-lpzsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.899958 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7481394e-6d8e-4d35-be11-70e33e7d775b-kube-api-access-k88v9" (OuterVolumeSpecName: "kube-api-access-k88v9") pod "7481394e-6d8e-4d35-be11-70e33e7d775b" (UID: "7481394e-6d8e-4d35-be11-70e33e7d775b"). InnerVolumeSpecName "kube-api-access-k88v9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.977960 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k88v9\" (UniqueName: \"kubernetes.io/projected/7481394e-6d8e-4d35-be11-70e33e7d775b-kube-api-access-k88v9\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:06 crc kubenswrapper[4792]: I0929 19:14:06.977990 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpzsx\" (UniqueName: \"kubernetes.io/projected/089abc08-05e8-4e45-a751-cbb48d3fbf6b-kube-api-access-lpzsx\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:07 crc kubenswrapper[4792]: I0929 19:14:07.300827 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd" Sep 29 19:14:07 crc kubenswrapper[4792]: I0929 19:14:07.300827 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-pl8rd" event={"ID":"089abc08-05e8-4e45-a751-cbb48d3fbf6b","Type":"ContainerDied","Data":"86f3bd978df47baaf9147eb2d1c57bef2cc14fafd3cbe83d9c35f3284df86453"} Sep 29 19:14:07 crc kubenswrapper[4792]: I0929 19:14:07.304136 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx" event={"ID":"7481394e-6d8e-4d35-be11-70e33e7d775b","Type":"ContainerDied","Data":"8acd6d4adfb716cec233eacefeee7540a424ec91b237a11a1b8db59d056d6a15"} Sep 29 19:14:07 crc kubenswrapper[4792]: I0929 19:14:07.304321 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mqhwx" Sep 29 19:14:07 crc kubenswrapper[4792]: I0929 19:14:07.381492 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pl8rd"] Sep 29 19:14:07 crc kubenswrapper[4792]: I0929 19:14:07.389256 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-pl8rd"] Sep 29 19:14:07 crc kubenswrapper[4792]: I0929 19:14:07.400231 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mqhwx"] Sep 29 19:14:07 crc kubenswrapper[4792]: I0929 19:14:07.409244 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mqhwx"] Sep 29 19:14:09 crc kubenswrapper[4792]: I0929 19:14:09.032863 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="089abc08-05e8-4e45-a751-cbb48d3fbf6b" path="/var/lib/kubelet/pods/089abc08-05e8-4e45-a751-cbb48d3fbf6b/volumes" Sep 29 19:14:09 crc kubenswrapper[4792]: I0929 19:14:09.034252 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7481394e-6d8e-4d35-be11-70e33e7d775b" path="/var/lib/kubelet/pods/7481394e-6d8e-4d35-be11-70e33e7d775b/volumes" Sep 29 19:14:09 crc kubenswrapper[4792]: I0929 19:14:09.320171 4792 generic.go:334] "Generic (PLEG): container finished" podID="9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4" containerID="2ef9e3a42f167fa3d91b8c2147833e5ecfc8411ab72b290c66c02ff287637c62" exitCode=0 Sep 29 19:14:09 crc kubenswrapper[4792]: I0929 19:14:09.320252 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4","Type":"ContainerDied","Data":"2ef9e3a42f167fa3d91b8c2147833e5ecfc8411ab72b290c66c02ff287637c62"} Sep 29 19:14:09 crc kubenswrapper[4792]: I0929 19:14:09.324052 4792 generic.go:334] "Generic (PLEG): container finished" podID="5c8592a0-091a-48ce-996c-f42bbdaf240c" containerID="fd8bf6a0402ed68d572abe02892d5ac4544cf0d983e59b76acaca91b7d570312" exitCode=0 Sep 29 19:14:09 crc kubenswrapper[4792]: I0929 19:14:09.324092 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5c8592a0-091a-48ce-996c-f42bbdaf240c","Type":"ContainerDied","Data":"fd8bf6a0402ed68d572abe02892d5ac4544cf0d983e59b76acaca91b7d570312"} Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.352407 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"da980a6f-8dcf-4d5b-a972-fc646865967c","Type":"ContainerStarted","Data":"9ced60be35585b9f199ff877b0dca62ee8a8fc85c5ecbb644f4a0248ebf0eda1"} Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.354292 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a90baac4-d8fc-472d-992f-c1a36805b12e","Type":"ContainerStarted","Data":"b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c"} Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.354994 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.361072 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mh2vn" event={"ID":"904d363d-f0dc-4318-9f28-d06e374a4838","Type":"ContainerStarted","Data":"3019ba06e376e9096b0fe19a75a727f3452ec199503c92e8220405b558ea0ea2"} Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.363951 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-0" event={"ID":"272430d7-51cd-4f45-bfdd-73ed83ab0bc2","Type":"ContainerStarted","Data":"1bae55aaed435b7b331d0e6fac4ee98079f197f8de6dcc294a902aadb040b252"} Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.367569 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4","Type":"ContainerStarted","Data":"162d4766f89c29b7443971146798f7cc60da7b1de6371eeae0ed15562af019d3"} Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.370121 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5c8592a0-091a-48ce-996c-f42bbdaf240c","Type":"ContainerStarted","Data":"37bf403a353c501f0fec898a9a1718f09f944327c832376d2166682cbac91819"} Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.398458 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=9.616101305 podStartE2EDuration="34.398442614s" podCreationTimestamp="2025-09-29 19:13:37 +0000 UTC" firstStartedPulling="2025-09-29 19:13:39.694689058 +0000 UTC m=+1031.687996454" lastFinishedPulling="2025-09-29 19:14:04.477030367 +0000 UTC m=+1056.470337763" observedRunningTime="2025-09-29 19:14:11.39373265 +0000 UTC m=+1063.387040056" watchObservedRunningTime="2025-09-29 19:14:11.398442614 +0000 UTC m=+1063.391750010" Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.401549 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=25.301987967 podStartE2EDuration="30.401539105s" podCreationTimestamp="2025-09-29 19:13:41 +0000 UTC" firstStartedPulling="2025-09-29 19:14:05.888256268 +0000 UTC m=+1057.881563664" lastFinishedPulling="2025-09-29 19:14:10.987807416 +0000 UTC m=+1062.981114802" observedRunningTime="2025-09-29 19:14:11.376073565 +0000 UTC m=+1063.369380971" watchObservedRunningTime="2025-09-29 19:14:11.401539105 +0000 UTC m=+1063.394846501" Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.421516 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=8.805712719 podStartE2EDuration="34.421504011s" podCreationTimestamp="2025-09-29 19:13:37 +0000 UTC" firstStartedPulling="2025-09-29 19:13:39.812328696 +0000 UTC m=+1031.805636082" lastFinishedPulling="2025-09-29 19:14:05.428119978 +0000 UTC m=+1057.421427374" observedRunningTime="2025-09-29 19:14:11.414820255 +0000 UTC m=+1063.408127651" watchObservedRunningTime="2025-09-29 19:14:11.421504011 +0000 UTC m=+1063.414811407" Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.959407 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.959696 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.959732 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.960508 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5fe4636f526132681f79866adf93cfab5bd3a4171ad63c289794ff569221d1f4"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:14:11 crc kubenswrapper[4792]: I0929 19:14:11.960562 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://5fe4636f526132681f79866adf93cfab5bd3a4171ad63c289794ff569221d1f4" gracePeriod=600 Sep 29 19:14:12 crc kubenswrapper[4792]: I0929 19:14:12.384626 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="5fe4636f526132681f79866adf93cfab5bd3a4171ad63c289794ff569221d1f4" exitCode=0 Sep 29 19:14:12 crc kubenswrapper[4792]: I0929 19:14:12.384677 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"5fe4636f526132681f79866adf93cfab5bd3a4171ad63c289794ff569221d1f4"} Sep 29 19:14:12 crc kubenswrapper[4792]: I0929 19:14:12.385048 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"e8bc360625c05ed5b39b0bdabe37934fb480a91515b533db0262f5a58fa6cf95"} Sep 29 19:14:12 crc kubenswrapper[4792]: I0929 19:14:12.385069 4792 scope.go:117] "RemoveContainer" containerID="487246f4f6005415a540bc4c228e6bec5b9bf5f447044923f1e106cf7a0cba67" Sep 29 19:14:12 crc kubenswrapper[4792]: I0929 19:14:12.388143 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zvckm" event={"ID":"321cc22b-3e6d-429f-aba5-d69c973d889e","Type":"ContainerStarted","Data":"38ef97823d16cdf919e501feea9f186132407b9940c55faaadaafa25cd0360ea"} Sep 29 19:14:12 crc kubenswrapper[4792]: I0929 19:14:12.388317 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-zvckm" Sep 29 19:14:12 crc kubenswrapper[4792]: I0929 19:14:12.394381 4792 generic.go:334] "Generic (PLEG): container finished" podID="904d363d-f0dc-4318-9f28-d06e374a4838" containerID="3019ba06e376e9096b0fe19a75a727f3452ec199503c92e8220405b558ea0ea2" exitCode=0 Sep 29 19:14:12 crc kubenswrapper[4792]: I0929 19:14:12.394840 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mh2vn" event={"ID":"904d363d-f0dc-4318-9f28-d06e374a4838","Type":"ContainerDied","Data":"3019ba06e376e9096b0fe19a75a727f3452ec199503c92e8220405b558ea0ea2"} Sep 29 19:14:12 crc kubenswrapper[4792]: I0929 19:14:12.427778 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-zvckm" podStartSLOduration=21.838982799 podStartE2EDuration="27.427752143s" podCreationTimestamp="2025-09-29 19:13:45 +0000 UTC" firstStartedPulling="2025-09-29 19:14:05.365311145 +0000 UTC m=+1057.358618541" lastFinishedPulling="2025-09-29 19:14:10.954080489 +0000 UTC m=+1062.947387885" observedRunningTime="2025-09-29 19:14:12.419731522 +0000 
UTC m=+1064.413038938" watchObservedRunningTime="2025-09-29 19:14:12.427752143 +0000 UTC m=+1064.421059559" Sep 29 19:14:13 crc kubenswrapper[4792]: I0929 19:14:13.408176 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cf5405ae-97dd-404d-9b0c-4d0faaf961cb","Type":"ContainerStarted","Data":"14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd"} Sep 29 19:14:13 crc kubenswrapper[4792]: I0929 19:14:13.411020 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mh2vn" event={"ID":"904d363d-f0dc-4318-9f28-d06e374a4838","Type":"ContainerStarted","Data":"9990c1d508c9c593501248501559be99188f9923a9c93fe432a0c9347b11fcfc"} Sep 29 19:14:13 crc kubenswrapper[4792]: I0929 19:14:13.411079 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mh2vn" event={"ID":"904d363d-f0dc-4318-9f28-d06e374a4838","Type":"ContainerStarted","Data":"cd47c7b44eefedd5c295d1b9c3378899cec058cb7285c71b71479a05416cf330"} Sep 29 19:14:13 crc kubenswrapper[4792]: I0929 19:14:13.452679 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-mh2vn" podStartSLOduration=22.912714218 podStartE2EDuration="28.452654947s" podCreationTimestamp="2025-09-29 19:13:45 +0000 UTC" firstStartedPulling="2025-09-29 19:14:05.361413562 +0000 UTC m=+1057.354720958" lastFinishedPulling="2025-09-29 19:14:10.901354291 +0000 UTC m=+1062.894661687" observedRunningTime="2025-09-29 19:14:13.447031219 +0000 UTC m=+1065.440338625" watchObservedRunningTime="2025-09-29 19:14:13.452654947 +0000 UTC m=+1065.445962373" Sep 29 19:14:14 crc kubenswrapper[4792]: I0929 19:14:14.417236 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:14:14 crc kubenswrapper[4792]: I0929 19:14:14.417569 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:14:15 crc kubenswrapper[4792]: E0929 19:14:15.415730 4792 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.115:37514->38.102.83.115:32997: write tcp 38.102.83.115:37514->38.102.83.115:32997: write: broken pipe Sep 29 19:14:15 crc kubenswrapper[4792]: I0929 19:14:15.425966 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"62bc84b7-9b21-447c-b1c3-21c4f178ba26","Type":"ContainerStarted","Data":"b824c6459e83975ad329f7367ab1eeb34ec3ddd56a5772f86a8492a07ef970d8"} Sep 29 19:14:15 crc kubenswrapper[4792]: I0929 19:14:15.429671 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"272430d7-51cd-4f45-bfdd-73ed83ab0bc2","Type":"ContainerStarted","Data":"75d8bd15aa235ab623f2fbd00529fdcae5614d224256ae9875fd50ede82436e1"} Sep 29 19:14:15 crc kubenswrapper[4792]: I0929 19:14:15.433610 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"da980a6f-8dcf-4d5b-a972-fc646865967c","Type":"ContainerStarted","Data":"764cdc418c9adf7d17a1e2d1da5f9af48a77b134525fa6f02e85507a64667860"} Sep 29 19:14:15 crc kubenswrapper[4792]: I0929 19:14:15.504726 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=19.408187141 podStartE2EDuration="28.504710902s" podCreationTimestamp="2025-09-29 19:13:47 +0000 UTC" firstStartedPulling="2025-09-29 19:14:05.940121463 +0000 UTC m=+1057.933428859" 
lastFinishedPulling="2025-09-29 19:14:15.036645224 +0000 UTC m=+1067.029952620" observedRunningTime="2025-09-29 19:14:15.491513605 +0000 UTC m=+1067.484821011" watchObservedRunningTime="2025-09-29 19:14:15.504710902 +0000 UTC m=+1067.498018298" Sep 29 19:14:15 crc kubenswrapper[4792]: I0929 19:14:15.521422 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=22.434768641 podStartE2EDuration="31.521407392s" podCreationTimestamp="2025-09-29 19:13:44 +0000 UTC" firstStartedPulling="2025-09-29 19:14:06.062492473 +0000 UTC m=+1058.055799869" lastFinishedPulling="2025-09-29 19:14:15.149131224 +0000 UTC m=+1067.142438620" observedRunningTime="2025-09-29 19:14:15.519487161 +0000 UTC m=+1067.512794577" watchObservedRunningTime="2025-09-29 19:14:15.521407392 +0000 UTC m=+1067.514714798" Sep 29 19:14:15 crc kubenswrapper[4792]: I0929 19:14:15.700408 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 29 19:14:15 crc kubenswrapper[4792]: I0929 19:14:15.700499 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 29 19:14:15 crc kubenswrapper[4792]: I0929 19:14:15.742742 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.478153 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.748627 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-n5gmr"] Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.791087 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-qnpm9"] Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.792616 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.807101 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.827141 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-jcjdl"] Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.828505 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.830987 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.843908 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-qnpm9"] Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.850750 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-jcjdl"] Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.966625 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76pnr\" (UniqueName: \"kubernetes.io/projected/51d7ae2f-4cac-4245-b001-91413652f89e-kube-api-access-76pnr\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.966966 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g9vc\" (UniqueName: \"kubernetes.io/projected/21b8a7a0-b819-46ff-9c50-19ede0e7d888-kube-api-access-2g9vc\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.967006 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.967026 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.967055 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/51d7ae2f-4cac-4245-b001-91413652f89e-ovn-rundir\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.967291 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/51d7ae2f-4cac-4245-b001-91413652f89e-ovs-rundir\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.967307 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-config\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.967329 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51d7ae2f-4cac-4245-b001-91413652f89e-combined-ca-bundle\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.967350 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51d7ae2f-4cac-4245-b001-91413652f89e-config\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:16 crc kubenswrapper[4792]: I0929 19:14:16.967374 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/51d7ae2f-4cac-4245-b001-91413652f89e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.083076 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/51d7ae2f-4cac-4245-b001-91413652f89e-ovs-rundir\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.083143 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-config\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.083533 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51d7ae2f-4cac-4245-b001-91413652f89e-combined-ca-bundle\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.083656 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51d7ae2f-4cac-4245-b001-91413652f89e-config\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.083775 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/51d7ae2f-4cac-4245-b001-91413652f89e-ovs-rundir\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.084419 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/51d7ae2f-4cac-4245-b001-91413652f89e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.084504 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-76pnr\" (UniqueName: \"kubernetes.io/projected/51d7ae2f-4cac-4245-b001-91413652f89e-kube-api-access-76pnr\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.084562 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g9vc\" (UniqueName: \"kubernetes.io/projected/21b8a7a0-b819-46ff-9c50-19ede0e7d888-kube-api-access-2g9vc\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.084627 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.084645 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.084704 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/51d7ae2f-4cac-4245-b001-91413652f89e-ovn-rundir\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.087722 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.088533 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51d7ae2f-4cac-4245-b001-91413652f89e-config\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.089537 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/51d7ae2f-4cac-4245-b001-91413652f89e-ovn-rundir\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.095339 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-config\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.095939 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/51d7ae2f-4cac-4245-b001-91413652f89e-combined-ca-bundle\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.096144 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.098274 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/51d7ae2f-4cac-4245-b001-91413652f89e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.135578 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g9vc\" (UniqueName: \"kubernetes.io/projected/21b8a7a0-b819-46ff-9c50-19ede0e7d888-kube-api-access-2g9vc\") pod \"dnsmasq-dns-5bf47b49b7-qnpm9\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.147386 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76pnr\" (UniqueName: \"kubernetes.io/projected/51d7ae2f-4cac-4245-b001-91413652f89e-kube-api-access-76pnr\") pod \"ovn-controller-metrics-jcjdl\" (UID: \"51d7ae2f-4cac-4245-b001-91413652f89e\") " pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.166677 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-jcjdl" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.194017 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.207998 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hwsf9"] Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.247667 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-lp2qs"] Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.251859 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.265500 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.296509 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-lp2qs"] Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.388538 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-config\") pod \"79e97193-edc8-43b3-a482-b3e3a0354cb5\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.388762 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-dns-svc\") pod \"79e97193-edc8-43b3-a482-b3e3a0354cb5\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.388874 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzvls\" (UniqueName: \"kubernetes.io/projected/79e97193-edc8-43b3-a482-b3e3a0354cb5-kube-api-access-mzvls\") pod \"79e97193-edc8-43b3-a482-b3e3a0354cb5\" (UID: \"79e97193-edc8-43b3-a482-b3e3a0354cb5\") " Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.389018 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-config" (OuterVolumeSpecName: "config") pod "79e97193-edc8-43b3-a482-b3e3a0354cb5" (UID: "79e97193-edc8-43b3-a482-b3e3a0354cb5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.389089 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6v55\" (UniqueName: \"kubernetes.io/projected/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-kube-api-access-k6v55\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.389197 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.389244 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-dns-svc\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.389291 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.389343 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-config\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.389404 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.389455 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "79e97193-edc8-43b3-a482-b3e3a0354cb5" (UID: "79e97193-edc8-43b3-a482-b3e3a0354cb5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.398087 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79e97193-edc8-43b3-a482-b3e3a0354cb5-kube-api-access-mzvls" (OuterVolumeSpecName: "kube-api-access-mzvls") pod "79e97193-edc8-43b3-a482-b3e3a0354cb5" (UID: "79e97193-edc8-43b3-a482-b3e3a0354cb5"). InnerVolumeSpecName "kube-api-access-mzvls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.420717 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.462398 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.463019 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-n5gmr" event={"ID":"79e97193-edc8-43b3-a482-b3e3a0354cb5","Type":"ContainerDied","Data":"c5fa26ff71f805e2c59f3d61b73a629dfe4944fe7b54217988421554f451b24c"} Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.490543 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.490627 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-config\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.490663 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6v55\" (UniqueName: \"kubernetes.io/projected/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-kube-api-access-k6v55\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.490743 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.490804 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-dns-svc\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.490871 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79e97193-edc8-43b3-a482-b3e3a0354cb5-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.490890 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzvls\" (UniqueName: \"kubernetes.io/projected/79e97193-edc8-43b3-a482-b3e3a0354cb5-kube-api-access-mzvls\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.491913 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-dns-svc\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.492508 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-config\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " 
pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.493208 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.494366 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.540032 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6v55\" (UniqueName: \"kubernetes.io/projected/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-kube-api-access-k6v55\") pod \"dnsmasq-dns-8554648995-lp2qs\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.599227 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.624297 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-n5gmr"] Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.638285 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-n5gmr"] Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.665129 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-hwsf9" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.804530 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-config\") pod \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.804999 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6rnk\" (UniqueName: \"kubernetes.io/projected/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-kube-api-access-t6rnk\") pod \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.805036 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-dns-svc\") pod \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\" (UID: \"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb\") " Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.806086 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a70ef7c9-6099-4acc-9a08-cc4a74aa17bb" (UID: "a70ef7c9-6099-4acc-9a08-cc4a74aa17bb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.807055 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-config" (OuterVolumeSpecName: "config") pod "a70ef7c9-6099-4acc-9a08-cc4a74aa17bb" (UID: "a70ef7c9-6099-4acc-9a08-cc4a74aa17bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.811065 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-kube-api-access-t6rnk" (OuterVolumeSpecName: "kube-api-access-t6rnk") pod "a70ef7c9-6099-4acc-9a08-cc4a74aa17bb" (UID: "a70ef7c9-6099-4acc-9a08-cc4a74aa17bb"). InnerVolumeSpecName "kube-api-access-t6rnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.868199 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-jcjdl"] Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.907231 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6rnk\" (UniqueName: \"kubernetes.io/projected/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-kube-api-access-t6rnk\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.907270 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:17 crc kubenswrapper[4792]: I0929 19:14:17.907282 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.009580 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-qnpm9"] Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.087317 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-lp2qs"] Sep 29 19:14:18 crc kubenswrapper[4792]: W0929 19:14:18.104246 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e5e32d1_fcfa_4745_a78a_f20d65ff7e2b.slice/crio-eb53a866ca28fe9f4626821a1c6b4a4d4b99ce88bb3cdec8bb199372656c8e29 WatchSource:0}: Error finding container eb53a866ca28fe9f4626821a1c6b4a4d4b99ce88bb3cdec8bb199372656c8e29: Status 404 returned error can't find the container with id eb53a866ca28fe9f4626821a1c6b4a4d4b99ce88bb3cdec8bb199372656c8e29 Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.378485 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.380035 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.423781 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.469113 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-jcjdl" 
event={"ID":"51d7ae2f-4cac-4245-b001-91413652f89e","Type":"ContainerStarted","Data":"886201ee82b87ce3b4e181415f071ec7c4e77f90b6087fda047eecbfa9ea8353"} Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.470838 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-lp2qs" event={"ID":"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b","Type":"ContainerStarted","Data":"eb53a866ca28fe9f4626821a1c6b4a4d4b99ce88bb3cdec8bb199372656c8e29"} Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.472248 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" event={"ID":"21b8a7a0-b819-46ff-9c50-19ede0e7d888","Type":"ContainerStarted","Data":"8be46d7d8398f2e12a37aa648adfc380611f5d6fe4d6aabf853d55e42d852cd4"} Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.474364 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-hwsf9" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.477053 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-hwsf9" event={"ID":"a70ef7c9-6099-4acc-9a08-cc4a74aa17bb","Type":"ContainerDied","Data":"818e10797eb8377db26910649781f06c9466be013324495e51d4807e8cfe543f"} Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.564149 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hwsf9"] Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.573541 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hwsf9"] Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.643558 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.866144 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.866180 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.889770 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.891716 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.894205 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.894402 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-5z8m4" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.894631 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.894835 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.911207 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.920782 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 29 19:14:18 crc kubenswrapper[4792]: I0929 19:14:18.920819 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.027239 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79e97193-edc8-43b3-a482-b3e3a0354cb5" path="/var/lib/kubelet/pods/79e97193-edc8-43b3-a482-b3e3a0354cb5/volumes" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.027576 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a70ef7c9-6099-4acc-9a08-cc4a74aa17bb" path="/var/lib/kubelet/pods/a70ef7c9-6099-4acc-9a08-cc4a74aa17bb/volumes" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.046955 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.047021 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-config\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.047056 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.047156 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.047303 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh8w4\" (UniqueName: \"kubernetes.io/projected/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-kube-api-access-vh8w4\") pod \"ovn-northd-0\" (UID: 
\"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.047334 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-scripts\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.047398 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.149280 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.149332 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-config\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.149357 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.149399 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.149456 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh8w4\" (UniqueName: \"kubernetes.io/projected/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-kube-api-access-vh8w4\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.149474 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-scripts\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.149495 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.150229 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-ovn-rundir\") pod 
\"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.150447 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-config\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.151646 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-scripts\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.155957 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.167282 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.168064 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.173312 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vh8w4\" (UniqueName: \"kubernetes.io/projected/934bc291-1ca4-4155-bb99-b3fde7a0d5e5-kube-api-access-vh8w4\") pod \"ovn-northd-0\" (UID: \"934bc291-1ca4-4155-bb99-b3fde7a0d5e5\") " pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.215370 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 19:14:19 crc kubenswrapper[4792]: I0929 19:14:19.634014 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 19:14:20 crc kubenswrapper[4792]: I0929 19:14:20.489245 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"934bc291-1ca4-4155-bb99-b3fde7a0d5e5","Type":"ContainerStarted","Data":"7962fb4d89fc17755b499c454d33356588da3c7a0763896c7ace719e7a12e363"} Sep 29 19:14:21 crc kubenswrapper[4792]: I0929 19:14:21.458148 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 19:14:21 crc kubenswrapper[4792]: I0929 19:14:21.550044 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-jcjdl" event={"ID":"51d7ae2f-4cac-4245-b001-91413652f89e","Type":"ContainerStarted","Data":"272adeab0325e2d0c3c8cae52dcf3118cb39cc5088d2d6f2e523b00acb92dc27"} Sep 29 19:14:22 crc kubenswrapper[4792]: I0929 19:14:22.298039 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 29 19:14:22 crc kubenswrapper[4792]: I0929 19:14:22.305125 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 29 19:14:22 crc kubenswrapper[4792]: I0929 19:14:22.372746 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="5c8592a0-091a-48ce-996c-f42bbdaf240c" containerName="galera" probeResult="failure" output=< Sep 29 19:14:22 crc kubenswrapper[4792]: wsrep_local_state_comment (Joined) differs from Synced Sep 29 19:14:22 crc kubenswrapper[4792]: > Sep 29 19:14:22 crc kubenswrapper[4792]: I0929 19:14:22.381464 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4" containerName="galera" probeResult="failure" output=< Sep 29 19:14:22 crc kubenswrapper[4792]: wsrep_local_state_comment (Joined) differs from Synced Sep 29 19:14:22 crc kubenswrapper[4792]: > Sep 29 19:14:22 crc kubenswrapper[4792]: I0929 19:14:22.577971 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-jcjdl" podStartSLOduration=6.577952966 podStartE2EDuration="6.577952966s" podCreationTimestamp="2025-09-29 19:14:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:14:22.575819529 +0000 UTC m=+1074.569126935" watchObservedRunningTime="2025-09-29 19:14:22.577952966 +0000 UTC m=+1074.571260362" Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.570114 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"934bc291-1ca4-4155-bb99-b3fde7a0d5e5","Type":"ContainerStarted","Data":"cc6fbd3f9175494420cd8a58a8ed268e0c0e3a196a8da8d7601543b1ee20e58f"} Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.570489 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.570505 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"934bc291-1ca4-4155-bb99-b3fde7a0d5e5","Type":"ContainerStarted","Data":"37be451ededf3f54cb7580da2d323e5c8bd0e4fef04fef13fab1537bb1a9f6f3"} Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.572337 4792 generic.go:334] 
"Generic (PLEG): container finished" podID="3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" containerID="6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6" exitCode=0 Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.572421 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-lp2qs" event={"ID":"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b","Type":"ContainerDied","Data":"6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6"} Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.576051 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ea8bd43c-bb10-450a-b564-c7b4247d1252","Type":"ContainerStarted","Data":"aa57984a16d30952646bac5ea641fa72367bb26d164a1870893908f7e98b3174"} Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.576239 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.578613 4792 generic.go:334] "Generic (PLEG): container finished" podID="21b8a7a0-b819-46ff-9c50-19ede0e7d888" containerID="b24ad5801a158f24696b2762d4ddb95578f0b87313ae335c8dbdcf68c1ed264e" exitCode=0 Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.578637 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" event={"ID":"21b8a7a0-b819-46ff-9c50-19ede0e7d888","Type":"ContainerDied","Data":"b24ad5801a158f24696b2762d4ddb95578f0b87313ae335c8dbdcf68c1ed264e"} Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.606507 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.212296766 podStartE2EDuration="5.606478394s" podCreationTimestamp="2025-09-29 19:14:18 +0000 UTC" firstStartedPulling="2025-09-29 19:14:19.648638832 +0000 UTC m=+1071.641946228" lastFinishedPulling="2025-09-29 19:14:23.04282046 +0000 UTC m=+1075.036127856" observedRunningTime="2025-09-29 19:14:23.598002221 +0000 UTC m=+1075.591309637" watchObservedRunningTime="2025-09-29 19:14:23.606478394 +0000 UTC m=+1075.599785810" Sep 29 19:14:23 crc kubenswrapper[4792]: I0929 19:14:23.682298 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.3578758779999998 podStartE2EDuration="45.682280919s" podCreationTimestamp="2025-09-29 19:13:38 +0000 UTC" firstStartedPulling="2025-09-29 19:13:40.005707012 +0000 UTC m=+1031.999014408" lastFinishedPulling="2025-09-29 19:14:22.330112053 +0000 UTC m=+1074.323419449" observedRunningTime="2025-09-29 19:14:23.682111175 +0000 UTC m=+1075.675418571" watchObservedRunningTime="2025-09-29 19:14:23.682280919 +0000 UTC m=+1075.675588315" Sep 29 19:14:24 crc kubenswrapper[4792]: I0929 19:14:24.588284 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-lp2qs" event={"ID":"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b","Type":"ContainerStarted","Data":"f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486"} Sep 29 19:14:24 crc kubenswrapper[4792]: I0929 19:14:24.588772 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:24 crc kubenswrapper[4792]: I0929 19:14:24.593020 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" event={"ID":"21b8a7a0-b819-46ff-9c50-19ede0e7d888","Type":"ContainerStarted","Data":"8c2b6418a3ffd7970c142a361b235df87cd3379b3075b4b39fbca65e23ddc9ae"} Sep 29 
19:14:24 crc kubenswrapper[4792]: I0929 19:14:24.611889 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-lp2qs" podStartSLOduration=3.376825547 podStartE2EDuration="7.611874375s" podCreationTimestamp="2025-09-29 19:14:17 +0000 UTC" firstStartedPulling="2025-09-29 19:14:18.106089445 +0000 UTC m=+1070.099396841" lastFinishedPulling="2025-09-29 19:14:22.341138273 +0000 UTC m=+1074.334445669" observedRunningTime="2025-09-29 19:14:24.610497138 +0000 UTC m=+1076.603804534" watchObservedRunningTime="2025-09-29 19:14:24.611874375 +0000 UTC m=+1076.605181781" Sep 29 19:14:24 crc kubenswrapper[4792]: I0929 19:14:24.638333 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" podStartSLOduration=4.317861937 podStartE2EDuration="8.638312471s" podCreationTimestamp="2025-09-29 19:14:16 +0000 UTC" firstStartedPulling="2025-09-29 19:14:18.020238657 +0000 UTC m=+1070.013546053" lastFinishedPulling="2025-09-29 19:14:22.340689191 +0000 UTC m=+1074.333996587" observedRunningTime="2025-09-29 19:14:24.63261089 +0000 UTC m=+1076.625918296" watchObservedRunningTime="2025-09-29 19:14:24.638312471 +0000 UTC m=+1076.631619867" Sep 29 19:14:25 crc kubenswrapper[4792]: I0929 19:14:25.604124 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:28 crc kubenswrapper[4792]: I0929 19:14:28.920582 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.035384 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.434976 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-bhpdb"] Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.437281 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bhpdb" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.440259 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.450901 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-bhpdb"] Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.624804 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tn2z\" (UniqueName: \"kubernetes.io/projected/903007b3-34eb-437b-adf1-ec511ad037ad-kube-api-access-8tn2z\") pod \"placement-db-create-bhpdb\" (UID: \"903007b3-34eb-437b-adf1-ec511ad037ad\") " pod="openstack/placement-db-create-bhpdb" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.627949 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-bvxvv"] Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.629411 4792 util.go:30] "No sandbox for pod can be found. 
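
The pod_startup_latency_tracker lines above encode a simple relation: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling) from that; when the image was already present, the pull timestamps stay at the zero time ("0001-01-01 00:00:00") and the two durations coincide. A quick stdlib Go check against the memcached-0 entry above:

    package main

    import (
        "fmt"
        "time"
    )

    // Recomputes the memcached-0 numbers from the log, assuming
    // SLO duration = E2E duration minus image-pull time.
    func main() {
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST" // Go's default time format, as logged
        parse := func(s string) time.Time {
            t, err := time.Parse(layout, s)
            if err != nil {
                panic(err)
            }
            return t
        }

        created := parse("2025-09-29 19:13:38 +0000 UTC")            // podCreationTimestamp
        running := parse("2025-09-29 19:14:23.682280919 +0000 UTC")  // watchObservedRunningTime
        pullStart := parse("2025-09-29 19:13:40.005707012 +0000 UTC") // firstStartedPulling
        pullEnd := parse("2025-09-29 19:14:22.330112053 +0000 UTC")   // lastFinishedPulling

        e2e := running.Sub(created)
        slo := e2e - pullEnd.Sub(pullStart)
        fmt.Println(e2e, slo) // 45.682280919s 3.357875878s, matching the logged values
    }
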
Need to start a new one" pod="openstack/glance-db-create-bvxvv" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.641511 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-bvxvv"] Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.726187 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t2cg\" (UniqueName: \"kubernetes.io/projected/80431414-82a8-41e6-b6d4-d9e23639c6a3-kube-api-access-7t2cg\") pod \"glance-db-create-bvxvv\" (UID: \"80431414-82a8-41e6-b6d4-d9e23639c6a3\") " pod="openstack/glance-db-create-bvxvv" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.726284 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tn2z\" (UniqueName: \"kubernetes.io/projected/903007b3-34eb-437b-adf1-ec511ad037ad-kube-api-access-8tn2z\") pod \"placement-db-create-bhpdb\" (UID: \"903007b3-34eb-437b-adf1-ec511ad037ad\") " pod="openstack/placement-db-create-bhpdb" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.745592 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tn2z\" (UniqueName: \"kubernetes.io/projected/903007b3-34eb-437b-adf1-ec511ad037ad-kube-api-access-8tn2z\") pod \"placement-db-create-bhpdb\" (UID: \"903007b3-34eb-437b-adf1-ec511ad037ad\") " pod="openstack/placement-db-create-bhpdb" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.779361 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bhpdb" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.828954 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t2cg\" (UniqueName: \"kubernetes.io/projected/80431414-82a8-41e6-b6d4-d9e23639c6a3-kube-api-access-7t2cg\") pod \"glance-db-create-bvxvv\" (UID: \"80431414-82a8-41e6-b6d4-d9e23639c6a3\") " pod="openstack/glance-db-create-bvxvv" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.868144 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t2cg\" (UniqueName: \"kubernetes.io/projected/80431414-82a8-41e6-b6d4-d9e23639c6a3-kube-api-access-7t2cg\") pod \"glance-db-create-bvxvv\" (UID: \"80431414-82a8-41e6-b6d4-d9e23639c6a3\") " pod="openstack/glance-db-create-bvxvv" Sep 29 19:14:29 crc kubenswrapper[4792]: I0929 19:14:29.961640 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-bvxvv" Sep 29 19:14:30 crc kubenswrapper[4792]: I0929 19:14:30.033433 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-bhpdb"] Sep 29 19:14:30 crc kubenswrapper[4792]: W0929 19:14:30.040219 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod903007b3_34eb_437b_adf1_ec511ad037ad.slice/crio-5d860a351cb3bd3f62aecc1645c3f14a9c1e08be7c138ac9b7c0424fa2392cd8 WatchSource:0}: Error finding container 5d860a351cb3bd3f62aecc1645c3f14a9c1e08be7c138ac9b7c0424fa2392cd8: Status 404 returned error can't find the container with id 5d860a351cb3bd3f62aecc1645c3f14a9c1e08be7c138ac9b7c0424fa2392cd8 Sep 29 19:14:30 crc kubenswrapper[4792]: I0929 19:14:30.406763 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-bvxvv"] Sep 29 19:14:30 crc kubenswrapper[4792]: I0929 19:14:30.648444 4792 generic.go:334] "Generic (PLEG): container finished" podID="80431414-82a8-41e6-b6d4-d9e23639c6a3" containerID="3d81bcab52a676c2e3edc508229d88c51f2289895c25906ee61ca55a6e620310" exitCode=0 Sep 29 19:14:30 crc kubenswrapper[4792]: I0929 19:14:30.648607 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-bvxvv" event={"ID":"80431414-82a8-41e6-b6d4-d9e23639c6a3","Type":"ContainerDied","Data":"3d81bcab52a676c2e3edc508229d88c51f2289895c25906ee61ca55a6e620310"} Sep 29 19:14:30 crc kubenswrapper[4792]: I0929 19:14:30.648762 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-bvxvv" event={"ID":"80431414-82a8-41e6-b6d4-d9e23639c6a3","Type":"ContainerStarted","Data":"02c69b774000b762325c4de459e601322555ecb73908f37dd193e11ce64b9ea1"} Sep 29 19:14:30 crc kubenswrapper[4792]: I0929 19:14:30.649943 4792 generic.go:334] "Generic (PLEG): container finished" podID="903007b3-34eb-437b-adf1-ec511ad037ad" containerID="1e0d58a3d084433691acd1399be7988139b2165da593e3708db3c700482262d0" exitCode=0 Sep 29 19:14:30 crc kubenswrapper[4792]: I0929 19:14:30.649986 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bhpdb" event={"ID":"903007b3-34eb-437b-adf1-ec511ad037ad","Type":"ContainerDied","Data":"1e0d58a3d084433691acd1399be7988139b2165da593e3708db3c700482262d0"} Sep 29 19:14:30 crc kubenswrapper[4792]: I0929 19:14:30.650004 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bhpdb" event={"ID":"903007b3-34eb-437b-adf1-ec511ad037ad","Type":"ContainerStarted","Data":"5d860a351cb3bd3f62aecc1645c3f14a9c1e08be7c138ac9b7c0424fa2392cd8"} Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.523259 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-qnpm9"] Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.524804 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" podUID="21b8a7a0-b819-46ff-9c50-19ede0e7d888" containerName="dnsmasq-dns" containerID="cri-o://8c2b6418a3ffd7970c142a361b235df87cd3379b3075b4b39fbca65e23ddc9ae" gracePeriod=10 Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.526018 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.573875 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-mgs5n"] Sep 29 19:14:31 
crc kubenswrapper[4792]: I0929 19:14:31.575053 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.603752 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-mgs5n"] Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.675368 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-config\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.675471 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.675500 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.675536 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxz8n\" (UniqueName: \"kubernetes.io/projected/e4b273af-3752-4d48-ae80-4d639b06e836-kube-api-access-lxz8n\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.675576 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.685597 4792 generic.go:334] "Generic (PLEG): container finished" podID="21b8a7a0-b819-46ff-9c50-19ede0e7d888" containerID="8c2b6418a3ffd7970c142a361b235df87cd3379b3075b4b39fbca65e23ddc9ae" exitCode=0 Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.685805 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" event={"ID":"21b8a7a0-b819-46ff-9c50-19ede0e7d888","Type":"ContainerDied","Data":"8c2b6418a3ffd7970c142a361b235df87cd3379b3075b4b39fbca65e23ddc9ae"} Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.778452 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.778501 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.778544 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxz8n\" (UniqueName: \"kubernetes.io/projected/e4b273af-3752-4d48-ae80-4d639b06e836-kube-api-access-lxz8n\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.778577 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.778606 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-config\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.779743 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-config\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.780740 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.781589 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.782178 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.833016 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxz8n\" (UniqueName: \"kubernetes.io/projected/e4b273af-3752-4d48-ae80-4d639b06e836-kube-api-access-lxz8n\") pod \"dnsmasq-dns-b8fbc5445-mgs5n\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:31 crc kubenswrapper[4792]: I0929 19:14:31.903887 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.260433 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-bvxvv" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.293270 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7t2cg\" (UniqueName: \"kubernetes.io/projected/80431414-82a8-41e6-b6d4-d9e23639c6a3-kube-api-access-7t2cg\") pod \"80431414-82a8-41e6-b6d4-d9e23639c6a3\" (UID: \"80431414-82a8-41e6-b6d4-d9e23639c6a3\") " Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.300893 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80431414-82a8-41e6-b6d4-d9e23639c6a3-kube-api-access-7t2cg" (OuterVolumeSpecName: "kube-api-access-7t2cg") pod "80431414-82a8-41e6-b6d4-d9e23639c6a3" (UID: "80431414-82a8-41e6-b6d4-d9e23639c6a3"). InnerVolumeSpecName "kube-api-access-7t2cg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.395782 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7t2cg\" (UniqueName: \"kubernetes.io/projected/80431414-82a8-41e6-b6d4-d9e23639c6a3-kube-api-access-7t2cg\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.429727 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bhpdb" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.455669 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.497220 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tn2z\" (UniqueName: \"kubernetes.io/projected/903007b3-34eb-437b-adf1-ec511ad037ad-kube-api-access-8tn2z\") pod \"903007b3-34eb-437b-adf1-ec511ad037ad\" (UID: \"903007b3-34eb-437b-adf1-ec511ad037ad\") " Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.497278 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-dns-svc\") pod \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.497298 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g9vc\" (UniqueName: \"kubernetes.io/projected/21b8a7a0-b819-46ff-9c50-19ede0e7d888-kube-api-access-2g9vc\") pod \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.497401 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-ovsdbserver-nb\") pod \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.497511 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-config\") pod \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\" (UID: \"21b8a7a0-b819-46ff-9c50-19ede0e7d888\") " Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.500096 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21b8a7a0-b819-46ff-9c50-19ede0e7d888-kube-api-access-2g9vc" 
(OuterVolumeSpecName: "kube-api-access-2g9vc") pod "21b8a7a0-b819-46ff-9c50-19ede0e7d888" (UID: "21b8a7a0-b819-46ff-9c50-19ede0e7d888"). InnerVolumeSpecName "kube-api-access-2g9vc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.500689 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/903007b3-34eb-437b-adf1-ec511ad037ad-kube-api-access-8tn2z" (OuterVolumeSpecName: "kube-api-access-8tn2z") pod "903007b3-34eb-437b-adf1-ec511ad037ad" (UID: "903007b3-34eb-437b-adf1-ec511ad037ad"). InnerVolumeSpecName "kube-api-access-8tn2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.541554 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "21b8a7a0-b819-46ff-9c50-19ede0e7d888" (UID: "21b8a7a0-b819-46ff-9c50-19ede0e7d888"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.553268 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "21b8a7a0-b819-46ff-9c50-19ede0e7d888" (UID: "21b8a7a0-b819-46ff-9c50-19ede0e7d888"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.560479 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-config" (OuterVolumeSpecName: "config") pod "21b8a7a0-b819-46ff-9c50-19ede0e7d888" (UID: "21b8a7a0-b819-46ff-9c50-19ede0e7d888"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.598999 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.599066 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tn2z\" (UniqueName: \"kubernetes.io/projected/903007b3-34eb-437b-adf1-ec511ad037ad-kube-api-access-8tn2z\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.599081 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.599092 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g9vc\" (UniqueName: \"kubernetes.io/projected/21b8a7a0-b819-46ff-9c50-19ede0e7d888-kube-api-access-2g9vc\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.599103 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21b8a7a0-b819-46ff-9c50-19ede0e7d888-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.604084 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.693911 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-bvxvv" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.693902 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-bvxvv" event={"ID":"80431414-82a8-41e6-b6d4-d9e23639c6a3","Type":"ContainerDied","Data":"02c69b774000b762325c4de459e601322555ecb73908f37dd193e11ce64b9ea1"} Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.694023 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02c69b774000b762325c4de459e601322555ecb73908f37dd193e11ce64b9ea1" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.695678 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" event={"ID":"21b8a7a0-b819-46ff-9c50-19ede0e7d888","Type":"ContainerDied","Data":"8be46d7d8398f2e12a37aa648adfc380611f5d6fe4d6aabf853d55e42d852cd4"} Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.695715 4792 scope.go:117] "RemoveContainer" containerID="8c2b6418a3ffd7970c142a361b235df87cd3379b3075b4b39fbca65e23ddc9ae" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.695786 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.697879 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-bhpdb" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.697836 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bhpdb" event={"ID":"903007b3-34eb-437b-adf1-ec511ad037ad","Type":"ContainerDied","Data":"5d860a351cb3bd3f62aecc1645c3f14a9c1e08be7c138ac9b7c0424fa2392cd8"} Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.697952 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d860a351cb3bd3f62aecc1645c3f14a9c1e08be7c138ac9b7c0424fa2392cd8" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.717003 4792 scope.go:117] "RemoveContainer" containerID="b24ad5801a158f24696b2762d4ddb95578f0b87313ae335c8dbdcf68c1ed264e" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.731737 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-qnpm9"] Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.742943 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-mgs5n"] Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.745691 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-qnpm9"] Sep 29 19:14:32 crc kubenswrapper[4792]: W0929 19:14:32.747215 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4b273af_3752_4d48_ae80_4d639b06e836.slice/crio-c8dcbfadbd3594ec133d924ba47e910a1a469ec2b09704c4dd3ac676dc803048 WatchSource:0}: Error finding container c8dcbfadbd3594ec133d924ba47e910a1a469ec2b09704c4dd3ac676dc803048: Status 404 returned error can't find the container with id c8dcbfadbd3594ec133d924ba47e910a1a469ec2b09704c4dd3ac676dc803048 Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.788364 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 29 19:14:32 crc kubenswrapper[4792]: E0929 19:14:32.789121 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21b8a7a0-b819-46ff-9c50-19ede0e7d888" containerName="init" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.789142 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="21b8a7a0-b819-46ff-9c50-19ede0e7d888" containerName="init" Sep 29 19:14:32 crc kubenswrapper[4792]: E0929 19:14:32.789153 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21b8a7a0-b819-46ff-9c50-19ede0e7d888" containerName="dnsmasq-dns" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.791336 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="21b8a7a0-b819-46ff-9c50-19ede0e7d888" containerName="dnsmasq-dns" Sep 29 19:14:32 crc kubenswrapper[4792]: E0929 19:14:32.791427 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80431414-82a8-41e6-b6d4-d9e23639c6a3" containerName="mariadb-database-create" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.791441 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="80431414-82a8-41e6-b6d4-d9e23639c6a3" containerName="mariadb-database-create" Sep 29 19:14:32 crc kubenswrapper[4792]: E0929 19:14:32.791518 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="903007b3-34eb-437b-adf1-ec511ad037ad" containerName="mariadb-database-create" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.791527 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="903007b3-34eb-437b-adf1-ec511ad037ad" containerName="mariadb-database-create" Sep 29 
19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.791818 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="21b8a7a0-b819-46ff-9c50-19ede0e7d888" containerName="dnsmasq-dns" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.791868 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="80431414-82a8-41e6-b6d4-d9e23639c6a3" containerName="mariadb-database-create" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.791878 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="903007b3-34eb-437b-adf1-ec511ad037ad" containerName="mariadb-database-create" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.798302 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.801302 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.801873 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.802121 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-sp2s5" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.802254 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.802709 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.903353 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t277l\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-kube-api-access-t277l\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.903514 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cd33a904-c32b-4781-b3fe-53d903764497-cache\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.903877 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.903993 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:32 crc kubenswrapper[4792]: I0929 19:14:32.904040 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cd33a904-c32b-4781-b3fe-53d903764497-lock\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.004939 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.005213 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.005243 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cd33a904-c32b-4781-b3fe-53d903764497-lock\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.005302 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t277l\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-kube-api-access-t277l\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.005325 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cd33a904-c32b-4781-b3fe-53d903764497-cache\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.005387 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: E0929 19:14:33.005557 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 19:14:33 crc kubenswrapper[4792]: E0929 19:14:33.005575 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 19:14:33 crc kubenswrapper[4792]: E0929 19:14:33.005621 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift podName:cd33a904-c32b-4781-b3fe-53d903764497 nodeName:}" failed. No retries permitted until 2025-09-29 19:14:33.5056045 +0000 UTC m=+1085.498911896 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift") pod "swift-storage-0" (UID: "cd33a904-c32b-4781-b3fe-53d903764497") : configmap "swift-ring-files" not found Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.005557 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cd33a904-c32b-4781-b3fe-53d903764497-lock\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.005887 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cd33a904-c32b-4781-b3fe-53d903764497-cache\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.025696 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t277l\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-kube-api-access-t277l\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.028824 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21b8a7a0-b819-46ff-9c50-19ede0e7d888" path="/var/lib/kubelet/pods/21b8a7a0-b819-46ff-9c50-19ede0e7d888/volumes" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.044170 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.514833 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:33 crc kubenswrapper[4792]: E0929 19:14:33.515456 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 19:14:33 crc kubenswrapper[4792]: E0929 19:14:33.515471 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 19:14:33 crc kubenswrapper[4792]: E0929 19:14:33.515553 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift podName:cd33a904-c32b-4781-b3fe-53d903764497 nodeName:}" failed. No retries permitted until 2025-09-29 19:14:34.51551964 +0000 UTC m=+1086.508827026 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift") pod "swift-storage-0" (UID: "cd33a904-c32b-4781-b3fe-53d903764497") : configmap "swift-ring-files" not found Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.711682 4792 generic.go:334] "Generic (PLEG): container finished" podID="e4b273af-3752-4d48-ae80-4d639b06e836" containerID="0ca9398d6a414aca77df34ba76c145ec1bb7471232b87ae9e57d36750f3b4bb1" exitCode=0 Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.711744 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" event={"ID":"e4b273af-3752-4d48-ae80-4d639b06e836","Type":"ContainerDied","Data":"0ca9398d6a414aca77df34ba76c145ec1bb7471232b87ae9e57d36750f3b4bb1"} Sep 29 19:14:33 crc kubenswrapper[4792]: I0929 19:14:33.711813 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" event={"ID":"e4b273af-3752-4d48-ae80-4d639b06e836","Type":"ContainerStarted","Data":"c8dcbfadbd3594ec133d924ba47e910a1a469ec2b09704c4dd3ac676dc803048"} Sep 29 19:14:34 crc kubenswrapper[4792]: I0929 19:14:34.292560 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 29 19:14:34 crc kubenswrapper[4792]: I0929 19:14:34.535696 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:34 crc kubenswrapper[4792]: E0929 19:14:34.535964 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 19:14:34 crc kubenswrapper[4792]: E0929 19:14:34.536169 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 19:14:34 crc kubenswrapper[4792]: E0929 19:14:34.536233 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift podName:cd33a904-c32b-4781-b3fe-53d903764497 nodeName:}" failed. No retries permitted until 2025-09-29 19:14:36.536210013 +0000 UTC m=+1088.529517409 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift") pod "swift-storage-0" (UID: "cd33a904-c32b-4781-b3fe-53d903764497") : configmap "swift-ring-files" not found Sep 29 19:14:34 crc kubenswrapper[4792]: I0929 19:14:34.722176 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" event={"ID":"e4b273af-3752-4d48-ae80-4d639b06e836","Type":"ContainerStarted","Data":"9916413a2cc81dab7505eced696984a99cc43dd3bc6234a6ccf8a181a76593a2"} Sep 29 19:14:34 crc kubenswrapper[4792]: I0929 19:14:34.722391 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:34 crc kubenswrapper[4792]: I0929 19:14:34.739752 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" podStartSLOduration=3.739731499 podStartE2EDuration="3.739731499s" podCreationTimestamp="2025-09-29 19:14:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:14:34.738466335 +0000 UTC m=+1086.731773761" watchObservedRunningTime="2025-09-29 19:14:34.739731499 +0000 UTC m=+1086.733038885" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.569218 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:36 crc kubenswrapper[4792]: E0929 19:14:36.569425 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 19:14:36 crc kubenswrapper[4792]: E0929 19:14:36.569883 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 19:14:36 crc kubenswrapper[4792]: E0929 19:14:36.569941 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift podName:cd33a904-c32b-4781-b3fe-53d903764497 nodeName:}" failed. No retries permitted until 2025-09-29 19:14:40.569923805 +0000 UTC m=+1092.563231201 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift") pod "swift-storage-0" (UID: "cd33a904-c32b-4781-b3fe-53d903764497") : configmap "swift-ring-files" not found Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.590153 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-scl6k"] Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.591182 4792 util.go:30] "No sandbox for pod can be found. 
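
swift-storage-0 cannot start because its etc-swift projected volume needs the swift-ring-files configmap, which will only exist once the swift-ring-rebalance-scl6k job (created at 19:14:36 above) has run; until then the mount fails and nestedpendingoperations reschedules it with exponential backoff, which is why durationBeforeRetry walks 500ms, 1s, 2s, 4s and, further down, 8s. A sketch of that doubling, assuming the 500ms initial delay seen here and a cap of roughly two minutes (the kubelet's usual ceiling for volume-operation backoff):

    package main

    import (
        "fmt"
        "time"
    )

    // Reproduces the durationBeforeRetry sequence for the etc-swift
    // mount: each failure doubles the backoff, up to an assumed cap.
    func main() {
        backoff := 500 * time.Millisecond
        maxBackoff := 2 * time.Minute
        for i := 1; i <= 6; i++ {
            fmt.Printf("failure %d: retry in %v\n", i, backoff)
            backoff *= 2
            if backoff > maxBackoff {
                backoff = maxBackoff
            }
        }
        // prints 500ms, 1s, 2s, 4s, 8s, 16s — the logged sequence so far
    }
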
Need to start a new one" pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.594472 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.595233 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.605673 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.609928 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-scl6k"] Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.671702 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnrz7\" (UniqueName: \"kubernetes.io/projected/654442d0-5361-4c10-b60a-2eb3bcf71acd-kube-api-access-lnrz7\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.671752 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-swiftconf\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.671783 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-scripts\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.671805 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-dispersionconf\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.671947 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-ring-data-devices\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.671965 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/654442d0-5361-4c10-b60a-2eb3bcf71acd-etc-swift\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.671997 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-combined-ca-bundle\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 
19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.773463 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnrz7\" (UniqueName: \"kubernetes.io/projected/654442d0-5361-4c10-b60a-2eb3bcf71acd-kube-api-access-lnrz7\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.773510 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-swiftconf\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.773551 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-scripts\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.773576 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-dispersionconf\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.773685 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-ring-data-devices\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.773738 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/654442d0-5361-4c10-b60a-2eb3bcf71acd-etc-swift\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.773788 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-combined-ca-bundle\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.774480 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/654442d0-5361-4c10-b60a-2eb3bcf71acd-etc-swift\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.775091 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-ring-data-devices\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.775519 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-scripts\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.784349 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-swiftconf\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.787943 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-dispersionconf\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.788183 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-combined-ca-bundle\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.790822 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnrz7\" (UniqueName: \"kubernetes.io/projected/654442d0-5361-4c10-b60a-2eb3bcf71acd-kube-api-access-lnrz7\") pod \"swift-ring-rebalance-scl6k\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") " pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:36 crc kubenswrapper[4792]: I0929 19:14:36.904990 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:37 crc kubenswrapper[4792]: I0929 19:14:37.364224 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-scl6k"] Sep 29 19:14:37 crc kubenswrapper[4792]: I0929 19:14:37.422779 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5bf47b49b7-qnpm9" podUID="21b8a7a0-b819-46ff-9c50-19ede0e7d888" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: i/o timeout" Sep 29 19:14:37 crc kubenswrapper[4792]: I0929 19:14:37.747516 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-scl6k" event={"ID":"654442d0-5361-4c10-b60a-2eb3bcf71acd","Type":"ContainerStarted","Data":"9024a0c3fde020da990ab38ce179d636b0feef9fe611f8df021e63afe867ab6a"} Sep 29 19:14:38 crc kubenswrapper[4792]: I0929 19:14:38.881863 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-5qvbx"] Sep 29 19:14:38 crc kubenswrapper[4792]: I0929 19:14:38.883095 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-5qvbx" Sep 29 19:14:38 crc kubenswrapper[4792]: I0929 19:14:38.890173 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5qvbx"] Sep 29 19:14:38 crc kubenswrapper[4792]: I0929 19:14:38.915351 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7z9s\" (UniqueName: \"kubernetes.io/projected/eb43922b-132e-4ce3-8004-d0fddc9e7c80-kube-api-access-h7z9s\") pod \"keystone-db-create-5qvbx\" (UID: \"eb43922b-132e-4ce3-8004-d0fddc9e7c80\") " pod="openstack/keystone-db-create-5qvbx" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.017437 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7z9s\" (UniqueName: \"kubernetes.io/projected/eb43922b-132e-4ce3-8004-d0fddc9e7c80-kube-api-access-h7z9s\") pod \"keystone-db-create-5qvbx\" (UID: \"eb43922b-132e-4ce3-8004-d0fddc9e7c80\") " pod="openstack/keystone-db-create-5qvbx" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.044591 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7z9s\" (UniqueName: \"kubernetes.io/projected/eb43922b-132e-4ce3-8004-d0fddc9e7c80-kube-api-access-h7z9s\") pod \"keystone-db-create-5qvbx\" (UID: \"eb43922b-132e-4ce3-8004-d0fddc9e7c80\") " pod="openstack/keystone-db-create-5qvbx" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.214200 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5qvbx" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.421692 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-4622-account-create-kqskg"] Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.422966 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-4622-account-create-kqskg" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.429088 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-4622-account-create-kqskg"] Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.432178 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.526682 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcftx\" (UniqueName: \"kubernetes.io/projected/f309741e-5733-4b16-ba3b-f354fca03459-kube-api-access-zcftx\") pod \"placement-4622-account-create-kqskg\" (UID: \"f309741e-5733-4b16-ba3b-f354fca03459\") " pod="openstack/placement-4622-account-create-kqskg" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.628672 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcftx\" (UniqueName: \"kubernetes.io/projected/f309741e-5733-4b16-ba3b-f354fca03459-kube-api-access-zcftx\") pod \"placement-4622-account-create-kqskg\" (UID: \"f309741e-5733-4b16-ba3b-f354fca03459\") " pod="openstack/placement-4622-account-create-kqskg" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.652684 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcftx\" (UniqueName: \"kubernetes.io/projected/f309741e-5733-4b16-ba3b-f354fca03459-kube-api-access-zcftx\") pod \"placement-4622-account-create-kqskg\" (UID: \"f309741e-5733-4b16-ba3b-f354fca03459\") " pod="openstack/placement-4622-account-create-kqskg" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.742873 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4622-account-create-kqskg" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.799677 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-019a-account-create-22pm5"] Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.800817 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-019a-account-create-22pm5" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.803904 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.810655 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-019a-account-create-22pm5"] Sep 29 19:14:39 crc kubenswrapper[4792]: I0929 19:14:39.932151 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57qlr\" (UniqueName: \"kubernetes.io/projected/42ef9ed2-647c-4dd1-aca8-625f68ad7a15-kube-api-access-57qlr\") pod \"glance-019a-account-create-22pm5\" (UID: \"42ef9ed2-647c-4dd1-aca8-625f68ad7a15\") " pod="openstack/glance-019a-account-create-22pm5" Sep 29 19:14:40 crc kubenswrapper[4792]: I0929 19:14:40.035016 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57qlr\" (UniqueName: \"kubernetes.io/projected/42ef9ed2-647c-4dd1-aca8-625f68ad7a15-kube-api-access-57qlr\") pod \"glance-019a-account-create-22pm5\" (UID: \"42ef9ed2-647c-4dd1-aca8-625f68ad7a15\") " pod="openstack/glance-019a-account-create-22pm5" Sep 29 19:14:40 crc kubenswrapper[4792]: I0929 19:14:40.069291 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57qlr\" (UniqueName: \"kubernetes.io/projected/42ef9ed2-647c-4dd1-aca8-625f68ad7a15-kube-api-access-57qlr\") pod \"glance-019a-account-create-22pm5\" (UID: \"42ef9ed2-647c-4dd1-aca8-625f68ad7a15\") " pod="openstack/glance-019a-account-create-22pm5" Sep 29 19:14:40 crc kubenswrapper[4792]: I0929 19:14:40.126735 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-019a-account-create-22pm5" Sep 29 19:14:40 crc kubenswrapper[4792]: I0929 19:14:40.642874 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:14:40 crc kubenswrapper[4792]: E0929 19:14:40.643004 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 19:14:40 crc kubenswrapper[4792]: E0929 19:14:40.643275 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 19:14:40 crc kubenswrapper[4792]: E0929 19:14:40.643331 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift podName:cd33a904-c32b-4781-b3fe-53d903764497 nodeName:}" failed. No retries permitted until 2025-09-29 19:14:48.643311098 +0000 UTC m=+1100.636618494 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift") pod "swift-storage-0" (UID: "cd33a904-c32b-4781-b3fe-53d903764497") : configmap "swift-ring-files" not found Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.073956 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5qvbx"] Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.156485 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-019a-account-create-22pm5"] Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.162348 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-4622-account-create-kqskg"] Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.231282 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-zvckm" podUID="321cc22b-3e6d-429f-aba5-d69c973d889e" containerName="ovn-controller" probeResult="failure" output=< Sep 29 19:14:41 crc kubenswrapper[4792]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 29 19:14:41 crc kubenswrapper[4792]: > Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.783122 4792 generic.go:334] "Generic (PLEG): container finished" podID="f309741e-5733-4b16-ba3b-f354fca03459" containerID="af1cf705ed7ff0a8d8a3545337f7f1cc64c316a00f78da41cd59272bab8ae4aa" exitCode=0 Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.783180 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4622-account-create-kqskg" event={"ID":"f309741e-5733-4b16-ba3b-f354fca03459","Type":"ContainerDied","Data":"af1cf705ed7ff0a8d8a3545337f7f1cc64c316a00f78da41cd59272bab8ae4aa"} Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.783208 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4622-account-create-kqskg" event={"ID":"f309741e-5733-4b16-ba3b-f354fca03459","Type":"ContainerStarted","Data":"551c513348342cbbb6de9522d84acf2a91281cc61f0b55e29f22495513a81231"} Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.784753 4792 generic.go:334] "Generic (PLEG): container finished" podID="42ef9ed2-647c-4dd1-aca8-625f68ad7a15" containerID="d63440b09d488d13b24a4c4f5f3881e107e8b29f3efd5358d84538a35eb37e77" exitCode=0 Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.784789 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-019a-account-create-22pm5" event={"ID":"42ef9ed2-647c-4dd1-aca8-625f68ad7a15","Type":"ContainerDied","Data":"d63440b09d488d13b24a4c4f5f3881e107e8b29f3efd5358d84538a35eb37e77"} Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.784804 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-019a-account-create-22pm5" event={"ID":"42ef9ed2-647c-4dd1-aca8-625f68ad7a15","Type":"ContainerStarted","Data":"3ea7e73d8bbbc3437f6c161e89bf2153b0839018c8beef3d38afc2383c9f6f3b"} Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.786067 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-scl6k" event={"ID":"654442d0-5361-4c10-b60a-2eb3bcf71acd","Type":"ContainerStarted","Data":"5a34f767230535444f7a64232831389e1afe738befbac46be68e88671408f1cc"} Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.788025 4792 generic.go:334] "Generic (PLEG): container finished" podID="eb43922b-132e-4ce3-8004-d0fddc9e7c80" containerID="5a7358fbfdb6b20ff36b0d3f3562868f8dc011eadd6b3d2525f2ddb6a6fd5939" exitCode=0 Sep 29 
19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.788048 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5qvbx" event={"ID":"eb43922b-132e-4ce3-8004-d0fddc9e7c80","Type":"ContainerDied","Data":"5a7358fbfdb6b20ff36b0d3f3562868f8dc011eadd6b3d2525f2ddb6a6fd5939"} Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.788061 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5qvbx" event={"ID":"eb43922b-132e-4ce3-8004-d0fddc9e7c80","Type":"ContainerStarted","Data":"11828ed853d9dc2f1fb7d7db9cb31d025dcb764eab7836504f0aea85b91424fd"} Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.833375 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-scl6k" podStartSLOduration=2.6113839519999997 podStartE2EDuration="5.833357498s" podCreationTimestamp="2025-09-29 19:14:36 +0000 UTC" firstStartedPulling="2025-09-29 19:14:37.375836815 +0000 UTC m=+1089.369144211" lastFinishedPulling="2025-09-29 19:14:40.597810361 +0000 UTC m=+1092.591117757" observedRunningTime="2025-09-29 19:14:41.830945585 +0000 UTC m=+1093.824253011" watchObservedRunningTime="2025-09-29 19:14:41.833357498 +0000 UTC m=+1093.826664904" Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.906190 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.954522 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-lp2qs"] Sep 29 19:14:41 crc kubenswrapper[4792]: I0929 19:14:41.954736 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-lp2qs" podUID="3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" containerName="dnsmasq-dns" containerID="cri-o://f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486" gracePeriod=10 Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.426717 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.501252 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-dns-svc\") pod \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.501311 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-sb\") pod \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.501396 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-config\") pod \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.501429 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-nb\") pod \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.501532 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6v55\" (UniqueName: \"kubernetes.io/projected/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-kube-api-access-k6v55\") pod \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\" (UID: \"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b\") " Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.520391 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-kube-api-access-k6v55" (OuterVolumeSpecName: "kube-api-access-k6v55") pod "3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" (UID: "3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b"). InnerVolumeSpecName "kube-api-access-k6v55". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.605906 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6v55\" (UniqueName: \"kubernetes.io/projected/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-kube-api-access-k6v55\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.636573 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" (UID: "3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.645335 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" (UID: "3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.649335 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" (UID: "3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.654318 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-config" (OuterVolumeSpecName: "config") pod "3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" (UID: "3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.707059 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.707084 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.707095 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.707104 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.796115 4792 generic.go:334] "Generic (PLEG): container finished" podID="3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" containerID="f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486" exitCode=0 Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.796164 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-lp2qs" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.796227 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-lp2qs" event={"ID":"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b","Type":"ContainerDied","Data":"f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486"} Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.796268 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-lp2qs" event={"ID":"3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b","Type":"ContainerDied","Data":"eb53a866ca28fe9f4626821a1c6b4a4d4b99ce88bb3cdec8bb199372656c8e29"} Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.796291 4792 scope.go:117] "RemoveContainer" containerID="f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.832301 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-lp2qs"] Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.840333 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-lp2qs"] Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.846737 4792 scope.go:117] "RemoveContainer" containerID="6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.868245 4792 scope.go:117] "RemoveContainer" containerID="f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486" Sep 29 19:14:42 crc kubenswrapper[4792]: E0929 19:14:42.868744 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486\": container with ID starting with f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486 not found: ID does not exist" containerID="f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.868787 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486"} err="failed to get container status \"f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486\": rpc error: code = NotFound desc = could not find container \"f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486\": container with ID starting with f8b7efe49035a6d83c4250f01dc999c1afc5a36a178fd26ad6d6e6867f872486 not found: ID does not exist" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.868815 4792 scope.go:117] "RemoveContainer" containerID="6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6" Sep 29 19:14:42 crc kubenswrapper[4792]: E0929 19:14:42.869267 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6\": container with ID starting with 6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6 not found: ID does not exist" containerID="6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6" Sep 29 19:14:42 crc kubenswrapper[4792]: I0929 19:14:42.869298 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6"} err="failed to get container status 
\"6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6\": rpc error: code = NotFound desc = could not find container \"6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6\": container with ID starting with 6e94e56bd5863597fb8b0b93b26ada129a9ca41c7deb36757db5907f284876b6 not found: ID does not exist" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.026069 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" path="/var/lib/kubelet/pods/3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b/volumes" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.280022 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4622-account-create-kqskg" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.285245 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-019a-account-create-22pm5" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.292566 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5qvbx" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.318762 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7z9s\" (UniqueName: \"kubernetes.io/projected/eb43922b-132e-4ce3-8004-d0fddc9e7c80-kube-api-access-h7z9s\") pod \"eb43922b-132e-4ce3-8004-d0fddc9e7c80\" (UID: \"eb43922b-132e-4ce3-8004-d0fddc9e7c80\") " Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.318803 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57qlr\" (UniqueName: \"kubernetes.io/projected/42ef9ed2-647c-4dd1-aca8-625f68ad7a15-kube-api-access-57qlr\") pod \"42ef9ed2-647c-4dd1-aca8-625f68ad7a15\" (UID: \"42ef9ed2-647c-4dd1-aca8-625f68ad7a15\") " Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.318942 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcftx\" (UniqueName: \"kubernetes.io/projected/f309741e-5733-4b16-ba3b-f354fca03459-kube-api-access-zcftx\") pod \"f309741e-5733-4b16-ba3b-f354fca03459\" (UID: \"f309741e-5733-4b16-ba3b-f354fca03459\") " Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.324800 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb43922b-132e-4ce3-8004-d0fddc9e7c80-kube-api-access-h7z9s" (OuterVolumeSpecName: "kube-api-access-h7z9s") pod "eb43922b-132e-4ce3-8004-d0fddc9e7c80" (UID: "eb43922b-132e-4ce3-8004-d0fddc9e7c80"). InnerVolumeSpecName "kube-api-access-h7z9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.324912 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f309741e-5733-4b16-ba3b-f354fca03459-kube-api-access-zcftx" (OuterVolumeSpecName: "kube-api-access-zcftx") pod "f309741e-5733-4b16-ba3b-f354fca03459" (UID: "f309741e-5733-4b16-ba3b-f354fca03459"). InnerVolumeSpecName "kube-api-access-zcftx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.324970 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42ef9ed2-647c-4dd1-aca8-625f68ad7a15-kube-api-access-57qlr" (OuterVolumeSpecName: "kube-api-access-57qlr") pod "42ef9ed2-647c-4dd1-aca8-625f68ad7a15" (UID: "42ef9ed2-647c-4dd1-aca8-625f68ad7a15"). 
InnerVolumeSpecName "kube-api-access-57qlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.420446 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7z9s\" (UniqueName: \"kubernetes.io/projected/eb43922b-132e-4ce3-8004-d0fddc9e7c80-kube-api-access-h7z9s\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.420474 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57qlr\" (UniqueName: \"kubernetes.io/projected/42ef9ed2-647c-4dd1-aca8-625f68ad7a15-kube-api-access-57qlr\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.420485 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcftx\" (UniqueName: \"kubernetes.io/projected/f309741e-5733-4b16-ba3b-f354fca03459-kube-api-access-zcftx\") on node \"crc\" DevicePath \"\"" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.812924 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5qvbx" event={"ID":"eb43922b-132e-4ce3-8004-d0fddc9e7c80","Type":"ContainerDied","Data":"11828ed853d9dc2f1fb7d7db9cb31d025dcb764eab7836504f0aea85b91424fd"} Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.812966 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11828ed853d9dc2f1fb7d7db9cb31d025dcb764eab7836504f0aea85b91424fd" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.812987 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5qvbx" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.817433 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4622-account-create-kqskg" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.818597 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4622-account-create-kqskg" event={"ID":"f309741e-5733-4b16-ba3b-f354fca03459","Type":"ContainerDied","Data":"551c513348342cbbb6de9522d84acf2a91281cc61f0b55e29f22495513a81231"} Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.818956 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="551c513348342cbbb6de9522d84acf2a91281cc61f0b55e29f22495513a81231" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.821024 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-019a-account-create-22pm5" event={"ID":"42ef9ed2-647c-4dd1-aca8-625f68ad7a15","Type":"ContainerDied","Data":"3ea7e73d8bbbc3437f6c161e89bf2153b0839018c8beef3d38afc2383c9f6f3b"} Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.821067 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ea7e73d8bbbc3437f6c161e89bf2153b0839018c8beef3d38afc2383c9f6f3b" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.821041 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-019a-account-create-22pm5" Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.822913 4792 generic.go:334] "Generic (PLEG): container finished" podID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" containerID="14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd" exitCode=0 Sep 29 19:14:43 crc kubenswrapper[4792]: I0929 19:14:43.823004 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cf5405ae-97dd-404d-9b0c-4d0faaf961cb","Type":"ContainerDied","Data":"14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd"} Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.835522 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cf5405ae-97dd-404d-9b0c-4d0faaf961cb","Type":"ContainerStarted","Data":"512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5"} Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.836082 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.871646 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=34.748827267 podStartE2EDuration="1m10.871622569s" podCreationTimestamp="2025-09-29 19:13:34 +0000 UTC" firstStartedPulling="2025-09-29 19:13:36.423170482 +0000 UTC m=+1028.416477878" lastFinishedPulling="2025-09-29 19:14:12.545965784 +0000 UTC m=+1064.539273180" observedRunningTime="2025-09-29 19:14:44.862987681 +0000 UTC m=+1096.856295087" watchObservedRunningTime="2025-09-29 19:14:44.871622569 +0000 UTC m=+1096.864929965" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.929213 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-hr5x7"] Sep 29 19:14:44 crc kubenswrapper[4792]: E0929 19:14:44.929530 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" containerName="init" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.929545 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" containerName="init" Sep 29 19:14:44 crc kubenswrapper[4792]: E0929 19:14:44.929564 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb43922b-132e-4ce3-8004-d0fddc9e7c80" containerName="mariadb-database-create" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.929570 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb43922b-132e-4ce3-8004-d0fddc9e7c80" containerName="mariadb-database-create" Sep 29 19:14:44 crc kubenswrapper[4792]: E0929 19:14:44.929580 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42ef9ed2-647c-4dd1-aca8-625f68ad7a15" containerName="mariadb-account-create" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.929586 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="42ef9ed2-647c-4dd1-aca8-625f68ad7a15" containerName="mariadb-account-create" Sep 29 19:14:44 crc kubenswrapper[4792]: E0929 19:14:44.929601 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" containerName="dnsmasq-dns" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.929607 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" containerName="dnsmasq-dns" Sep 29 19:14:44 crc kubenswrapper[4792]: E0929 19:14:44.929621 4792 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f309741e-5733-4b16-ba3b-f354fca03459" containerName="mariadb-account-create" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.929627 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f309741e-5733-4b16-ba3b-f354fca03459" containerName="mariadb-account-create" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.929769 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e5e32d1-fcfa-4745-a78a-f20d65ff7e2b" containerName="dnsmasq-dns" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.929785 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f309741e-5733-4b16-ba3b-f354fca03459" containerName="mariadb-account-create" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.929795 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="42ef9ed2-647c-4dd1-aca8-625f68ad7a15" containerName="mariadb-account-create" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.929803 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb43922b-132e-4ce3-8004-d0fddc9e7c80" containerName="mariadb-database-create" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.930299 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.932755 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hgxkm" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.932861 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 29 19:14:44 crc kubenswrapper[4792]: I0929 19:14:44.939747 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hr5x7"] Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.044190 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-db-sync-config-data\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.044256 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-combined-ca-bundle\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.044280 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-config-data\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.044305 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5b9z\" (UniqueName: \"kubernetes.io/projected/f52c7bda-8340-49b2-9f94-2767ab141a81-kube-api-access-p5b9z\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.145452 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-db-sync-config-data\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.145529 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-combined-ca-bundle\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.145555 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-config-data\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.145585 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5b9z\" (UniqueName: \"kubernetes.io/projected/f52c7bda-8340-49b2-9f94-2767ab141a81-kube-api-access-p5b9z\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.149629 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-db-sync-config-data\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.154598 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-combined-ca-bundle\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.164223 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-config-data\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.171434 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5b9z\" (UniqueName: \"kubernetes.io/projected/f52c7bda-8340-49b2-9f94-2767ab141a81-kube-api-access-p5b9z\") pod \"glance-db-sync-hr5x7\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.264670 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hr5x7" Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.844250 4792 generic.go:334] "Generic (PLEG): container finished" podID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" containerID="b824c6459e83975ad329f7367ab1eeb34ec3ddd56a5772f86a8492a07ef970d8" exitCode=0 Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.844324 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"62bc84b7-9b21-447c-b1c3-21c4f178ba26","Type":"ContainerDied","Data":"b824c6459e83975ad329f7367ab1eeb34ec3ddd56a5772f86a8492a07ef970d8"} Sep 29 19:14:45 crc kubenswrapper[4792]: I0929 19:14:45.853994 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hr5x7"] Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.224527 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-zvckm" podUID="321cc22b-3e6d-429f-aba5-d69c973d889e" containerName="ovn-controller" probeResult="failure" output=< Sep 29 19:14:46 crc kubenswrapper[4792]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 29 19:14:46 crc kubenswrapper[4792]: > Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.311954 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.315318 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-mh2vn" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.539215 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-zvckm-config-544zc"] Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.540542 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.545086 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.562831 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zvckm-config-544zc"] Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.669135 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.669174 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-scripts\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.669372 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-log-ovn\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.669427 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldcnb\" (UniqueName: \"kubernetes.io/projected/2ce7b034-fc6b-489c-bbec-999e7b4d3099-kube-api-access-ldcnb\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.669460 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-additional-scripts\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.669511 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run-ovn\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.771486 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-log-ovn\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.771586 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldcnb\" (UniqueName: 
\"kubernetes.io/projected/2ce7b034-fc6b-489c-bbec-999e7b4d3099-kube-api-access-ldcnb\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.771637 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-additional-scripts\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.771682 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run-ovn\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.771731 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.771754 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-scripts\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.771987 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run-ovn\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.772025 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.772644 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-additional-scripts\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.772724 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-log-ovn\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.774102 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-scripts\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.819377 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldcnb\" (UniqueName: \"kubernetes.io/projected/2ce7b034-fc6b-489c-bbec-999e7b4d3099-kube-api-access-ldcnb\") pod \"ovn-controller-zvckm-config-544zc\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") " pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.857108 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hr5x7" event={"ID":"f52c7bda-8340-49b2-9f94-2767ab141a81","Type":"ContainerStarted","Data":"44ac9356848c540300c27ef07e2e8e27fb43849bcab55a5c90623aeddce9afcc"} Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.859837 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.868356 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"62bc84b7-9b21-447c-b1c3-21c4f178ba26","Type":"ContainerStarted","Data":"b526a1929b86be5299fbda31deae78e3be086c29bf1ddad0937a6ccfb383e8a0"} Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.869354 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:14:46 crc kubenswrapper[4792]: I0929 19:14:46.909312 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371963.945482 podStartE2EDuration="1m12.909294375s" podCreationTimestamp="2025-09-29 19:13:34 +0000 UTC" firstStartedPulling="2025-09-29 19:13:36.765813385 +0000 UTC m=+1028.759120781" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:14:46.904965732 +0000 UTC m=+1098.898273148" watchObservedRunningTime="2025-09-29 19:14:46.909294375 +0000 UTC m=+1098.902601771" Sep 29 19:14:47 crc kubenswrapper[4792]: I0929 19:14:47.256054 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zvckm-config-544zc"] Sep 29 19:14:47 crc kubenswrapper[4792]: I0929 19:14:47.895644 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zvckm-config-544zc" event={"ID":"2ce7b034-fc6b-489c-bbec-999e7b4d3099","Type":"ContainerStarted","Data":"60de474f64d8ceab7bd04a696c276c4daae767384d55d5faf43ab7a04cb6e95a"} Sep 29 19:14:47 crc kubenswrapper[4792]: I0929 19:14:47.896055 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zvckm-config-544zc" event={"ID":"2ce7b034-fc6b-489c-bbec-999e7b4d3099","Type":"ContainerStarted","Data":"5d4390f81d76112aade079d84085763652ace954f52760e23f3ddead484007b8"} Sep 29 19:14:47 crc kubenswrapper[4792]: I0929 19:14:47.919074 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-zvckm-config-544zc" podStartSLOduration=1.91904211 podStartE2EDuration="1.91904211s" podCreationTimestamp="2025-09-29 19:14:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:14:47.913156666 +0000 UTC m=+1099.906464062" watchObservedRunningTime="2025-09-29 19:14:47.91904211 
Sep 29 19:14:48 crc kubenswrapper[4792]: I0929 19:14:48.722020 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0"
Sep 29 19:14:48 crc kubenswrapper[4792]: E0929 19:14:48.722174 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 29 19:14:48 crc kubenswrapper[4792]: E0929 19:14:48.722586 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 29 19:14:48 crc kubenswrapper[4792]: E0929 19:14:48.722647 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift podName:cd33a904-c32b-4781-b3fe-53d903764497 nodeName:}" failed. No retries permitted until 2025-09-29 19:15:04.722626009 +0000 UTC m=+1116.715933405 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift") pod "swift-storage-0" (UID: "cd33a904-c32b-4781-b3fe-53d903764497") : configmap "swift-ring-files" not found
Sep 29 19:14:48 crc kubenswrapper[4792]: I0929 19:14:48.903452 4792 generic.go:334] "Generic (PLEG): container finished" podID="2ce7b034-fc6b-489c-bbec-999e7b4d3099" containerID="60de474f64d8ceab7bd04a696c276c4daae767384d55d5faf43ab7a04cb6e95a" exitCode=0
Sep 29 19:14:48 crc kubenswrapper[4792]: I0929 19:14:48.903561 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zvckm-config-544zc" event={"ID":"2ce7b034-fc6b-489c-bbec-999e7b4d3099","Type":"ContainerDied","Data":"60de474f64d8ceab7bd04a696c276c4daae767384d55d5faf43ab7a04cb6e95a"}
Sep 29 19:14:48 crc kubenswrapper[4792]: I0929 19:14:48.906711 4792 generic.go:334] "Generic (PLEG): container finished" podID="654442d0-5361-4c10-b60a-2eb3bcf71acd" containerID="5a34f767230535444f7a64232831389e1afe738befbac46be68e88671408f1cc" exitCode=0
Sep 29 19:14:48 crc kubenswrapper[4792]: I0929 19:14:48.906787 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-scl6k" event={"ID":"654442d0-5361-4c10-b60a-2eb3bcf71acd","Type":"ContainerDied","Data":"5a34f767230535444f7a64232831389e1afe738befbac46be68e88671408f1cc"}
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.377642 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zvckm-config-544zc"
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.381770 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-scl6k"
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457157 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/654442d0-5361-4c10-b60a-2eb3bcf71acd-etc-swift\") pod \"654442d0-5361-4c10-b60a-2eb3bcf71acd\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457237 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-combined-ca-bundle\") pod \"654442d0-5361-4c10-b60a-2eb3bcf71acd\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457271 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-scripts\") pod \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457294 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-additional-scripts\") pod \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457328 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-scripts\") pod \"654442d0-5361-4c10-b60a-2eb3bcf71acd\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457357 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnrz7\" (UniqueName: \"kubernetes.io/projected/654442d0-5361-4c10-b60a-2eb3bcf71acd-kube-api-access-lnrz7\") pod \"654442d0-5361-4c10-b60a-2eb3bcf71acd\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457412 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-swiftconf\") pod \"654442d0-5361-4c10-b60a-2eb3bcf71acd\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457459 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldcnb\" (UniqueName: \"kubernetes.io/projected/2ce7b034-fc6b-489c-bbec-999e7b4d3099-kube-api-access-ldcnb\") pod \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457486 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run-ovn\") pod \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457524 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-log-ovn\") pod \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457560 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-dispersionconf\") pod \"654442d0-5361-4c10-b60a-2eb3bcf71acd\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457634 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run\") pod \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\" (UID: \"2ce7b034-fc6b-489c-bbec-999e7b4d3099\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.457683 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-ring-data-devices\") pod \"654442d0-5361-4c10-b60a-2eb3bcf71acd\" (UID: \"654442d0-5361-4c10-b60a-2eb3bcf71acd\") "
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.460015 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "2ce7b034-fc6b-489c-bbec-999e7b4d3099" (UID: "2ce7b034-fc6b-489c-bbec-999e7b4d3099"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.461282 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "654442d0-5361-4c10-b60a-2eb3bcf71acd" (UID: "654442d0-5361-4c10-b60a-2eb3bcf71acd"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.461342 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "2ce7b034-fc6b-489c-bbec-999e7b4d3099" (UID: "2ce7b034-fc6b-489c-bbec-999e7b4d3099"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.462916 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-scripts" (OuterVolumeSpecName: "scripts") pod "2ce7b034-fc6b-489c-bbec-999e7b4d3099" (UID: "2ce7b034-fc6b-489c-bbec-999e7b4d3099"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.464352 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/654442d0-5361-4c10-b60a-2eb3bcf71acd-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "654442d0-5361-4c10-b60a-2eb3bcf71acd" (UID: "654442d0-5361-4c10-b60a-2eb3bcf71acd"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.467549 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run" (OuterVolumeSpecName: "var-run") pod "2ce7b034-fc6b-489c-bbec-999e7b4d3099" (UID: "2ce7b034-fc6b-489c-bbec-999e7b4d3099"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.469037 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "2ce7b034-fc6b-489c-bbec-999e7b4d3099" (UID: "2ce7b034-fc6b-489c-bbec-999e7b4d3099"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.491139 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ce7b034-fc6b-489c-bbec-999e7b4d3099-kube-api-access-ldcnb" (OuterVolumeSpecName: "kube-api-access-ldcnb") pod "2ce7b034-fc6b-489c-bbec-999e7b4d3099" (UID: "2ce7b034-fc6b-489c-bbec-999e7b4d3099"). InnerVolumeSpecName "kube-api-access-ldcnb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.493442 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/654442d0-5361-4c10-b60a-2eb3bcf71acd-kube-api-access-lnrz7" (OuterVolumeSpecName: "kube-api-access-lnrz7") pod "654442d0-5361-4c10-b60a-2eb3bcf71acd" (UID: "654442d0-5361-4c10-b60a-2eb3bcf71acd"). InnerVolumeSpecName "kube-api-access-lnrz7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.496808 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "654442d0-5361-4c10-b60a-2eb3bcf71acd" (UID: "654442d0-5361-4c10-b60a-2eb3bcf71acd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.498064 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "654442d0-5361-4c10-b60a-2eb3bcf71acd" (UID: "654442d0-5361-4c10-b60a-2eb3bcf71acd"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.504828 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-scripts" (OuterVolumeSpecName: "scripts") pod "654442d0-5361-4c10-b60a-2eb3bcf71acd" (UID: "654442d0-5361-4c10-b60a-2eb3bcf71acd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.528123 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "654442d0-5361-4c10-b60a-2eb3bcf71acd" (UID: "654442d0-5361-4c10-b60a-2eb3bcf71acd"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561187 4792 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/654442d0-5361-4c10-b60a-2eb3bcf71acd-etc-swift\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561218 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561230 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561238 4792 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2ce7b034-fc6b-489c-bbec-999e7b4d3099-additional-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561246 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561254 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnrz7\" (UniqueName: \"kubernetes.io/projected/654442d0-5361-4c10-b60a-2eb3bcf71acd-kube-api-access-lnrz7\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561262 4792 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-swiftconf\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561270 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldcnb\" (UniqueName: \"kubernetes.io/projected/2ce7b034-fc6b-489c-bbec-999e7b4d3099-kube-api-access-ldcnb\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561277 4792 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run-ovn\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561284 4792 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-log-ovn\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561293 4792 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/654442d0-5361-4c10-b60a-2eb3bcf71acd-dispersionconf\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561301 4792 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2ce7b034-fc6b-489c-bbec-999e7b4d3099-var-run\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.561308 4792 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/654442d0-5361-4c10-b60a-2eb3bcf71acd-ring-data-devices\") on node \"crc\" DevicePath \"\""
Sep 29 19:14:50 crc kubenswrapper[4792]: I0929
19:14:50.922797 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zvckm-config-544zc" Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.922811 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zvckm-config-544zc" event={"ID":"2ce7b034-fc6b-489c-bbec-999e7b4d3099","Type":"ContainerDied","Data":"5d4390f81d76112aade079d84085763652ace954f52760e23f3ddead484007b8"} Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.922917 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d4390f81d76112aade079d84085763652ace954f52760e23f3ddead484007b8" Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.924271 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-scl6k" event={"ID":"654442d0-5361-4c10-b60a-2eb3bcf71acd","Type":"ContainerDied","Data":"9024a0c3fde020da990ab38ce179d636b0feef9fe611f8df021e63afe867ab6a"} Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.924293 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9024a0c3fde020da990ab38ce179d636b0feef9fe611f8df021e63afe867ab6a" Sep 29 19:14:50 crc kubenswrapper[4792]: I0929 19:14:50.924339 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-scl6k" Sep 29 19:14:51 crc kubenswrapper[4792]: I0929 19:14:51.056573 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-zvckm-config-544zc"] Sep 29 19:14:51 crc kubenswrapper[4792]: I0929 19:14:51.064568 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-zvckm-config-544zc"] Sep 29 19:14:51 crc kubenswrapper[4792]: I0929 19:14:51.253011 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-zvckm" Sep 29 19:14:53 crc kubenswrapper[4792]: I0929 19:14:53.025194 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ce7b034-fc6b-489c-bbec-999e7b4d3099" path="/var/lib/kubelet/pods/2ce7b034-fc6b-489c-bbec-999e7b4d3099/volumes" Sep 29 19:14:55 crc kubenswrapper[4792]: I0929 19:14:55.806112 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.144440 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-xpptn"] Sep 29 19:14:56 crc kubenswrapper[4792]: E0929 19:14:56.145239 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="654442d0-5361-4c10-b60a-2eb3bcf71acd" containerName="swift-ring-rebalance" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.145256 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="654442d0-5361-4c10-b60a-2eb3bcf71acd" containerName="swift-ring-rebalance" Sep 29 19:14:56 crc kubenswrapper[4792]: E0929 19:14:56.145267 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ce7b034-fc6b-489c-bbec-999e7b4d3099" containerName="ovn-config" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.145274 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ce7b034-fc6b-489c-bbec-999e7b4d3099" containerName="ovn-config" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.145433 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="654442d0-5361-4c10-b60a-2eb3bcf71acd" containerName="swift-ring-rebalance" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.145450 
4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ce7b034-fc6b-489c-bbec-999e7b4d3099" containerName="ovn-config" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.145940 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xpptn" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.157816 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-xpptn"] Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.217007 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.239765 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-lvkq7"] Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.240730 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lvkq7" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.268212 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49jrz\" (UniqueName: \"kubernetes.io/projected/acba234d-48c9-44b8-99ed-ce58f33c8dbd-kube-api-access-49jrz\") pod \"barbican-db-create-xpptn\" (UID: \"acba234d-48c9-44b8-99ed-ce58f33c8dbd\") " pod="openstack/barbican-db-create-xpptn" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.270652 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lvkq7"] Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.369772 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49jrz\" (UniqueName: \"kubernetes.io/projected/acba234d-48c9-44b8-99ed-ce58f33c8dbd-kube-api-access-49jrz\") pod \"barbican-db-create-xpptn\" (UID: \"acba234d-48c9-44b8-99ed-ce58f33c8dbd\") " pod="openstack/barbican-db-create-xpptn" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.369818 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc4qf\" (UniqueName: \"kubernetes.io/projected/11ca06bb-7266-4314-b094-0b34f5531fda-kube-api-access-pc4qf\") pod \"cinder-db-create-lvkq7\" (UID: \"11ca06bb-7266-4314-b094-0b34f5531fda\") " pod="openstack/cinder-db-create-lvkq7" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.395527 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49jrz\" (UniqueName: \"kubernetes.io/projected/acba234d-48c9-44b8-99ed-ce58f33c8dbd-kube-api-access-49jrz\") pod \"barbican-db-create-xpptn\" (UID: \"acba234d-48c9-44b8-99ed-ce58f33c8dbd\") " pod="openstack/barbican-db-create-xpptn" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.453766 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-lzctq"] Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.457547 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-lzctq" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.463819 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-lzctq"] Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.471383 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc4qf\" (UniqueName: \"kubernetes.io/projected/11ca06bb-7266-4314-b094-0b34f5531fda-kube-api-access-pc4qf\") pod \"cinder-db-create-lvkq7\" (UID: \"11ca06bb-7266-4314-b094-0b34f5531fda\") " pod="openstack/cinder-db-create-lvkq7" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.476623 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xpptn" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.500993 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc4qf\" (UniqueName: \"kubernetes.io/projected/11ca06bb-7266-4314-b094-0b34f5531fda-kube-api-access-pc4qf\") pod \"cinder-db-create-lvkq7\" (UID: \"11ca06bb-7266-4314-b094-0b34f5531fda\") " pod="openstack/cinder-db-create-lvkq7" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.553636 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lvkq7" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.572903 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfvb4\" (UniqueName: \"kubernetes.io/projected/c4720cde-c7b5-4bf3-9aff-8daee7fe6211-kube-api-access-tfvb4\") pod \"neutron-db-create-lzctq\" (UID: \"c4720cde-c7b5-4bf3-9aff-8daee7fe6211\") " pod="openstack/neutron-db-create-lzctq" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.674310 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfvb4\" (UniqueName: \"kubernetes.io/projected/c4720cde-c7b5-4bf3-9aff-8daee7fe6211-kube-api-access-tfvb4\") pod \"neutron-db-create-lzctq\" (UID: \"c4720cde-c7b5-4bf3-9aff-8daee7fe6211\") " pod="openstack/neutron-db-create-lzctq" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.705484 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfvb4\" (UniqueName: \"kubernetes.io/projected/c4720cde-c7b5-4bf3-9aff-8daee7fe6211-kube-api-access-tfvb4\") pod \"neutron-db-create-lzctq\" (UID: \"c4720cde-c7b5-4bf3-9aff-8daee7fe6211\") " pod="openstack/neutron-db-create-lzctq" Sep 29 19:14:56 crc kubenswrapper[4792]: I0929 19:14:56.775522 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-lzctq" Sep 29 19:14:58 crc kubenswrapper[4792]: I0929 19:14:58.944052 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-860c-account-create-rqnbc"] Sep 29 19:14:58 crc kubenswrapper[4792]: I0929 19:14:58.945724 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-860c-account-create-rqnbc" Sep 29 19:14:58 crc kubenswrapper[4792]: I0929 19:14:58.948327 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 29 19:14:59 crc kubenswrapper[4792]: I0929 19:14:58.999993 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-860c-account-create-rqnbc"] Sep 29 19:14:59 crc kubenswrapper[4792]: I0929 19:14:59.013885 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mh2zm\" (UniqueName: \"kubernetes.io/projected/2711541c-2b88-4ca2-9519-5ca1e8ebb8a6-kube-api-access-mh2zm\") pod \"keystone-860c-account-create-rqnbc\" (UID: \"2711541c-2b88-4ca2-9519-5ca1e8ebb8a6\") " pod="openstack/keystone-860c-account-create-rqnbc" Sep 29 19:14:59 crc kubenswrapper[4792]: I0929 19:14:59.115042 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mh2zm\" (UniqueName: \"kubernetes.io/projected/2711541c-2b88-4ca2-9519-5ca1e8ebb8a6-kube-api-access-mh2zm\") pod \"keystone-860c-account-create-rqnbc\" (UID: \"2711541c-2b88-4ca2-9519-5ca1e8ebb8a6\") " pod="openstack/keystone-860c-account-create-rqnbc" Sep 29 19:14:59 crc kubenswrapper[4792]: I0929 19:14:59.147087 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mh2zm\" (UniqueName: \"kubernetes.io/projected/2711541c-2b88-4ca2-9519-5ca1e8ebb8a6-kube-api-access-mh2zm\") pod \"keystone-860c-account-create-rqnbc\" (UID: \"2711541c-2b88-4ca2-9519-5ca1e8ebb8a6\") " pod="openstack/keystone-860c-account-create-rqnbc" Sep 29 19:14:59 crc kubenswrapper[4792]: I0929 19:14:59.264917 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-860c-account-create-rqnbc" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.127594 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb"] Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.130795 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.133198 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.133564 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.143372 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb"] Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.242274 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-secret-volume\") pod \"collect-profiles-29319555-29ttb\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.242353 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqfw8\" (UniqueName: \"kubernetes.io/projected/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-kube-api-access-lqfw8\") pod \"collect-profiles-29319555-29ttb\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.242528 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-config-volume\") pod \"collect-profiles-29319555-29ttb\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.344416 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-config-volume\") pod \"collect-profiles-29319555-29ttb\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.344516 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-secret-volume\") pod \"collect-profiles-29319555-29ttb\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.344566 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqfw8\" (UniqueName: \"kubernetes.io/projected/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-kube-api-access-lqfw8\") pod \"collect-profiles-29319555-29ttb\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.345750 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-config-volume\") pod 
\"collect-profiles-29319555-29ttb\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.349726 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-secret-volume\") pod \"collect-profiles-29319555-29ttb\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.361269 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqfw8\" (UniqueName: \"kubernetes.io/projected/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-kube-api-access-lqfw8\") pod \"collect-profiles-29319555-29ttb\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:00 crc kubenswrapper[4792]: I0929 19:15:00.460726 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:01 crc kubenswrapper[4792]: I0929 19:15:01.487360 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-lzctq"] Sep 29 19:15:01 crc kubenswrapper[4792]: I0929 19:15:01.503028 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-xpptn"] Sep 29 19:15:01 crc kubenswrapper[4792]: I0929 19:15:01.508908 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb"] Sep 29 19:15:01 crc kubenswrapper[4792]: I0929 19:15:01.543697 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lvkq7"] Sep 29 19:15:01 crc kubenswrapper[4792]: I0929 19:15:01.658353 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-860c-account-create-rqnbc"] Sep 29 19:15:01 crc kubenswrapper[4792]: W0929 19:15:01.663633 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2711541c_2b88_4ca2_9519_5ca1e8ebb8a6.slice/crio-b2465f6baab78c83c10e874b4b5ad921f144dbd18774b4cbab54ce6d4461426f WatchSource:0}: Error finding container b2465f6baab78c83c10e874b4b5ad921f144dbd18774b4cbab54ce6d4461426f: Status 404 returned error can't find the container with id b2465f6baab78c83c10e874b4b5ad921f144dbd18774b4cbab54ce6d4461426f Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.013630 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hr5x7" event={"ID":"f52c7bda-8340-49b2-9f94-2767ab141a81","Type":"ContainerStarted","Data":"5deab9e68e296346f4cb638c1c76c1e575af121f11caef29da0e3a18b2f684c8"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.016278 4792 generic.go:334] "Generic (PLEG): container finished" podID="11ca06bb-7266-4314-b094-0b34f5531fda" containerID="ff133d042b720fee1687e13a12024ebbae6d9dea5a29474449cbe5667c9bff5e" exitCode=0 Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.016371 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lvkq7" event={"ID":"11ca06bb-7266-4314-b094-0b34f5531fda","Type":"ContainerDied","Data":"ff133d042b720fee1687e13a12024ebbae6d9dea5a29474449cbe5667c9bff5e"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.016665 4792 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lvkq7" event={"ID":"11ca06bb-7266-4314-b094-0b34f5531fda","Type":"ContainerStarted","Data":"1ed82e5b544c692674eb5bb8758917715c1a5bef63b24a3ca00d5c3f444d6656"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.018011 4792 generic.go:334] "Generic (PLEG): container finished" podID="c4720cde-c7b5-4bf3-9aff-8daee7fe6211" containerID="ddc9d28107fa91cdf470c188847390a41b636284fe7fd5d3c085cf389a63d428" exitCode=0 Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.018062 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-lzctq" event={"ID":"c4720cde-c7b5-4bf3-9aff-8daee7fe6211","Type":"ContainerDied","Data":"ddc9d28107fa91cdf470c188847390a41b636284fe7fd5d3c085cf389a63d428"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.018080 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-lzctq" event={"ID":"c4720cde-c7b5-4bf3-9aff-8daee7fe6211","Type":"ContainerStarted","Data":"34525cf546f4b8f7787cd5723c0fcf42303e299729673c4b4974f6aa068c4dea"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.019470 4792 generic.go:334] "Generic (PLEG): container finished" podID="ba543e31-cf1d-49bb-8097-ed5d69bc0e2c" containerID="eec6e0b1a6d04da1ac0d34a22fdf6c655c3f300b62718c0a019d8c1e79aacdef" exitCode=0 Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.019517 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" event={"ID":"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c","Type":"ContainerDied","Data":"eec6e0b1a6d04da1ac0d34a22fdf6c655c3f300b62718c0a019d8c1e79aacdef"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.019537 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" event={"ID":"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c","Type":"ContainerStarted","Data":"354e910c76820b3051d55367e261b86220f2c2d7ee36801f45d2d8a0c93b2bad"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.021793 4792 generic.go:334] "Generic (PLEG): container finished" podID="2711541c-2b88-4ca2-9519-5ca1e8ebb8a6" containerID="f3e4079c2dc0435bfa60ed75b4ff57bd896c9cde399f1fac9497685694618adb" exitCode=0 Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.021943 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-860c-account-create-rqnbc" event={"ID":"2711541c-2b88-4ca2-9519-5ca1e8ebb8a6","Type":"ContainerDied","Data":"f3e4079c2dc0435bfa60ed75b4ff57bd896c9cde399f1fac9497685694618adb"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.022021 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-860c-account-create-rqnbc" event={"ID":"2711541c-2b88-4ca2-9519-5ca1e8ebb8a6","Type":"ContainerStarted","Data":"b2465f6baab78c83c10e874b4b5ad921f144dbd18774b4cbab54ce6d4461426f"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.023489 4792 generic.go:334] "Generic (PLEG): container finished" podID="acba234d-48c9-44b8-99ed-ce58f33c8dbd" containerID="5a0b18fcc7d14c9991621e948f72d3109f320e075f37cc23c34f2fd7db77c3f7" exitCode=0 Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.023607 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xpptn" event={"ID":"acba234d-48c9-44b8-99ed-ce58f33c8dbd","Type":"ContainerDied","Data":"5a0b18fcc7d14c9991621e948f72d3109f320e075f37cc23c34f2fd7db77c3f7"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 
19:15:02.023690 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xpptn" event={"ID":"acba234d-48c9-44b8-99ed-ce58f33c8dbd","Type":"ContainerStarted","Data":"c294a500855c20af5dac5f6e1f63aacb82a332afde040c62016350e0f595c038"} Sep 29 19:15:02 crc kubenswrapper[4792]: I0929 19:15:02.046021 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-hr5x7" podStartSLOduration=2.973130072 podStartE2EDuration="18.046001926s" podCreationTimestamp="2025-09-29 19:14:44 +0000 UTC" firstStartedPulling="2025-09-29 19:14:45.873006033 +0000 UTC m=+1097.866313429" lastFinishedPulling="2025-09-29 19:15:00.945877887 +0000 UTC m=+1112.939185283" observedRunningTime="2025-09-29 19:15:02.035459515 +0000 UTC m=+1114.028766921" watchObservedRunningTime="2025-09-29 19:15:02.046001926 +0000 UTC m=+1114.039309332" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.430489 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.511864 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqfw8\" (UniqueName: \"kubernetes.io/projected/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-kube-api-access-lqfw8\") pod \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.512166 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-config-volume\") pod \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.512196 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-secret-volume\") pod \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\" (UID: \"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c\") " Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.513253 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-config-volume" (OuterVolumeSpecName: "config-volume") pod "ba543e31-cf1d-49bb-8097-ed5d69bc0e2c" (UID: "ba543e31-cf1d-49bb-8097-ed5d69bc0e2c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.518000 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ba543e31-cf1d-49bb-8097-ed5d69bc0e2c" (UID: "ba543e31-cf1d-49bb-8097-ed5d69bc0e2c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.518242 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-kube-api-access-lqfw8" (OuterVolumeSpecName: "kube-api-access-lqfw8") pod "ba543e31-cf1d-49bb-8097-ed5d69bc0e2c" (UID: "ba543e31-cf1d-49bb-8097-ed5d69bc0e2c"). InnerVolumeSpecName "kube-api-access-lqfw8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.580587 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-lzctq" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.586676 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-860c-account-create-rqnbc" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.606440 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xpptn" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.613904 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqfw8\" (UniqueName: \"kubernetes.io/projected/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-kube-api-access-lqfw8\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.613939 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.613947 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.614325 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lvkq7" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.714780 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49jrz\" (UniqueName: \"kubernetes.io/projected/acba234d-48c9-44b8-99ed-ce58f33c8dbd-kube-api-access-49jrz\") pod \"acba234d-48c9-44b8-99ed-ce58f33c8dbd\" (UID: \"acba234d-48c9-44b8-99ed-ce58f33c8dbd\") " Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.714883 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mh2zm\" (UniqueName: \"kubernetes.io/projected/2711541c-2b88-4ca2-9519-5ca1e8ebb8a6-kube-api-access-mh2zm\") pod \"2711541c-2b88-4ca2-9519-5ca1e8ebb8a6\" (UID: \"2711541c-2b88-4ca2-9519-5ca1e8ebb8a6\") " Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.714960 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfvb4\" (UniqueName: \"kubernetes.io/projected/c4720cde-c7b5-4bf3-9aff-8daee7fe6211-kube-api-access-tfvb4\") pod \"c4720cde-c7b5-4bf3-9aff-8daee7fe6211\" (UID: \"c4720cde-c7b5-4bf3-9aff-8daee7fe6211\") " Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.714981 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc4qf\" (UniqueName: \"kubernetes.io/projected/11ca06bb-7266-4314-b094-0b34f5531fda-kube-api-access-pc4qf\") pod \"11ca06bb-7266-4314-b094-0b34f5531fda\" (UID: \"11ca06bb-7266-4314-b094-0b34f5531fda\") " Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.718183 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4720cde-c7b5-4bf3-9aff-8daee7fe6211-kube-api-access-tfvb4" (OuterVolumeSpecName: "kube-api-access-tfvb4") pod "c4720cde-c7b5-4bf3-9aff-8daee7fe6211" (UID: "c4720cde-c7b5-4bf3-9aff-8daee7fe6211"). InnerVolumeSpecName "kube-api-access-tfvb4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.718324 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acba234d-48c9-44b8-99ed-ce58f33c8dbd-kube-api-access-49jrz" (OuterVolumeSpecName: "kube-api-access-49jrz") pod "acba234d-48c9-44b8-99ed-ce58f33c8dbd" (UID: "acba234d-48c9-44b8-99ed-ce58f33c8dbd"). InnerVolumeSpecName "kube-api-access-49jrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.718598 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11ca06bb-7266-4314-b094-0b34f5531fda-kube-api-access-pc4qf" (OuterVolumeSpecName: "kube-api-access-pc4qf") pod "11ca06bb-7266-4314-b094-0b34f5531fda" (UID: "11ca06bb-7266-4314-b094-0b34f5531fda"). InnerVolumeSpecName "kube-api-access-pc4qf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.722091 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2711541c-2b88-4ca2-9519-5ca1e8ebb8a6-kube-api-access-mh2zm" (OuterVolumeSpecName: "kube-api-access-mh2zm") pod "2711541c-2b88-4ca2-9519-5ca1e8ebb8a6" (UID: "2711541c-2b88-4ca2-9519-5ca1e8ebb8a6"). InnerVolumeSpecName "kube-api-access-mh2zm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.817413 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfvb4\" (UniqueName: \"kubernetes.io/projected/c4720cde-c7b5-4bf3-9aff-8daee7fe6211-kube-api-access-tfvb4\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.817440 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc4qf\" (UniqueName: \"kubernetes.io/projected/11ca06bb-7266-4314-b094-0b34f5531fda-kube-api-access-pc4qf\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.817461 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49jrz\" (UniqueName: \"kubernetes.io/projected/acba234d-48c9-44b8-99ed-ce58f33c8dbd-kube-api-access-49jrz\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:03 crc kubenswrapper[4792]: I0929 19:15:03.817513 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mh2zm\" (UniqueName: \"kubernetes.io/projected/2711541c-2b88-4ca2-9519-5ca1e8ebb8a6-kube-api-access-mh2zm\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.049065 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" event={"ID":"ba543e31-cf1d-49bb-8097-ed5d69bc0e2c","Type":"ContainerDied","Data":"354e910c76820b3051d55367e261b86220f2c2d7ee36801f45d2d8a0c93b2bad"} Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.049145 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="354e910c76820b3051d55367e261b86220f2c2d7ee36801f45d2d8a0c93b2bad" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.049107 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.057225 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-860c-account-create-rqnbc" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.057261 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-860c-account-create-rqnbc" event={"ID":"2711541c-2b88-4ca2-9519-5ca1e8ebb8a6","Type":"ContainerDied","Data":"b2465f6baab78c83c10e874b4b5ad921f144dbd18774b4cbab54ce6d4461426f"} Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.057317 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2465f6baab78c83c10e874b4b5ad921f144dbd18774b4cbab54ce6d4461426f" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.059005 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xpptn" event={"ID":"acba234d-48c9-44b8-99ed-ce58f33c8dbd","Type":"ContainerDied","Data":"c294a500855c20af5dac5f6e1f63aacb82a332afde040c62016350e0f595c038"} Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.059027 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c294a500855c20af5dac5f6e1f63aacb82a332afde040c62016350e0f595c038" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.059096 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xpptn" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.060973 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lvkq7" event={"ID":"11ca06bb-7266-4314-b094-0b34f5531fda","Type":"ContainerDied","Data":"1ed82e5b544c692674eb5bb8758917715c1a5bef63b24a3ca00d5c3f444d6656"} Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.061015 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ed82e5b544c692674eb5bb8758917715c1a5bef63b24a3ca00d5c3f444d6656" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.061110 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lvkq7" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.063604 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-lzctq" event={"ID":"c4720cde-c7b5-4bf3-9aff-8daee7fe6211","Type":"ContainerDied","Data":"34525cf546f4b8f7787cd5723c0fcf42303e299729673c4b4974f6aa068c4dea"} Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.063631 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34525cf546f4b8f7787cd5723c0fcf42303e299729673c4b4974f6aa068c4dea" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.063684 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-lzctq" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.732206 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.737791 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd33a904-c32b-4781-b3fe-53d903764497-etc-swift\") pod \"swift-storage-0\" (UID: \"cd33a904-c32b-4781-b3fe-53d903764497\") " pod="openstack/swift-storage-0" Sep 29 19:15:04 crc kubenswrapper[4792]: I0929 19:15:04.914301 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Sep 29 19:15:05 crc kubenswrapper[4792]: I0929 19:15:05.413018 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 19:15:06 crc kubenswrapper[4792]: I0929 19:15:06.084583 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"130383cc38e387649c6b341f6a56c85112935193744606d77c98431a0b2978e3"} Sep 29 19:15:07 crc kubenswrapper[4792]: I0929 19:15:07.094137 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"dd2b22317c37fa5b927ac43e5ce39f92cd9404d404fe831796d6a6eea62ada90"} Sep 29 19:15:07 crc kubenswrapper[4792]: I0929 19:15:07.094482 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"6500ba12df64e8dc95b27f3b6947954169dbc6accfe9f20557b65ca7a1511adb"} Sep 29 19:15:07 crc kubenswrapper[4792]: I0929 19:15:07.094491 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"bf800ba745d3c072672394ab48db9807f24e26890c48c3ee532151789900515b"} Sep 29 19:15:08 crc kubenswrapper[4792]: I0929 19:15:08.108767 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"5f5e4491a695a6f0c49836494f5d52fcb156ee73b7dce4a87bfbc48d661987ed"} Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.552949 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-9mkf6"] Sep 29 19:15:09 crc kubenswrapper[4792]: E0929 19:15:09.556514 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba543e31-cf1d-49bb-8097-ed5d69bc0e2c" containerName="collect-profiles" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.557779 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba543e31-cf1d-49bb-8097-ed5d69bc0e2c" containerName="collect-profiles" Sep 29 19:15:09 crc kubenswrapper[4792]: E0929 19:15:09.557891 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acba234d-48c9-44b8-99ed-ce58f33c8dbd" containerName="mariadb-database-create" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.557958 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="acba234d-48c9-44b8-99ed-ce58f33c8dbd" containerName="mariadb-database-create" Sep 29 19:15:09 crc kubenswrapper[4792]: E0929 19:15:09.558028 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11ca06bb-7266-4314-b094-0b34f5531fda" containerName="mariadb-database-create" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.558092 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="11ca06bb-7266-4314-b094-0b34f5531fda" containerName="mariadb-database-create" Sep 29 19:15:09 crc kubenswrapper[4792]: E0929 19:15:09.558155 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2711541c-2b88-4ca2-9519-5ca1e8ebb8a6" containerName="mariadb-account-create" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.558211 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2711541c-2b88-4ca2-9519-5ca1e8ebb8a6" containerName="mariadb-account-create" Sep 29 19:15:09 crc kubenswrapper[4792]: 
E0929 19:15:09.558273 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4720cde-c7b5-4bf3-9aff-8daee7fe6211" containerName="mariadb-database-create" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.558331 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4720cde-c7b5-4bf3-9aff-8daee7fe6211" containerName="mariadb-database-create" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.558566 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="11ca06bb-7266-4314-b094-0b34f5531fda" containerName="mariadb-database-create" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.558660 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4720cde-c7b5-4bf3-9aff-8daee7fe6211" containerName="mariadb-database-create" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.558735 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="acba234d-48c9-44b8-99ed-ce58f33c8dbd" containerName="mariadb-database-create" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.558817 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2711541c-2b88-4ca2-9519-5ca1e8ebb8a6" containerName="mariadb-account-create" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.558909 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba543e31-cf1d-49bb-8097-ed5d69bc0e2c" containerName="collect-profiles" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.559517 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.565337 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.565504 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-58chc" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.565626 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.565804 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.571720 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-9mkf6"] Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.621284 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-combined-ca-bundle\") pod \"keystone-db-sync-9mkf6\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.621421 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-config-data\") pod \"keystone-db-sync-9mkf6\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.621465 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5g6q\" (UniqueName: \"kubernetes.io/projected/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-kube-api-access-d5g6q\") pod \"keystone-db-sync-9mkf6\" (UID: 
\"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.723366 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-combined-ca-bundle\") pod \"keystone-db-sync-9mkf6\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.723506 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-config-data\") pod \"keystone-db-sync-9mkf6\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.723554 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5g6q\" (UniqueName: \"kubernetes.io/projected/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-kube-api-access-d5g6q\") pod \"keystone-db-sync-9mkf6\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.731749 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-combined-ca-bundle\") pod \"keystone-db-sync-9mkf6\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.743018 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-config-data\") pod \"keystone-db-sync-9mkf6\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.743645 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5g6q\" (UniqueName: \"kubernetes.io/projected/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-kube-api-access-d5g6q\") pod \"keystone-db-sync-9mkf6\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:09 crc kubenswrapper[4792]: I0929 19:15:09.881146 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:10 crc kubenswrapper[4792]: I0929 19:15:10.126235 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"0c160da99935a0798437f7003b36e79486d831624c3449b15343e95afbd062f5"} Sep 29 19:15:10 crc kubenswrapper[4792]: I0929 19:15:10.126556 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"50970e4c29b367295a9204e60daf636ca513c578fd77f35d22cd1f2c4650b442"} Sep 29 19:15:10 crc kubenswrapper[4792]: I0929 19:15:10.126567 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"2c50562014d87015ce31aafc10e13063808467ea4579fd67680f5cbc474b6247"} Sep 29 19:15:10 crc kubenswrapper[4792]: I0929 19:15:10.126578 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"27dad3e07b1ad6fe3cca24a2158e8cdcc08466c16b507211e97cd33eeca0e307"} Sep 29 19:15:10 crc kubenswrapper[4792]: I0929 19:15:10.403756 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-9mkf6"] Sep 29 19:15:10 crc kubenswrapper[4792]: W0929 19:15:10.411216 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef8285b6_60ee_4b53_bb6a_ffb4c9a6bb1e.slice/crio-47d23d282f55a650238ec179a88ac2a12f171cba8507047881677a14138723a2 WatchSource:0}: Error finding container 47d23d282f55a650238ec179a88ac2a12f171cba8507047881677a14138723a2: Status 404 returned error can't find the container with id 47d23d282f55a650238ec179a88ac2a12f171cba8507047881677a14138723a2 Sep 29 19:15:11 crc kubenswrapper[4792]: I0929 19:15:11.134972 4792 generic.go:334] "Generic (PLEG): container finished" podID="f52c7bda-8340-49b2-9f94-2767ab141a81" containerID="5deab9e68e296346f4cb638c1c76c1e575af121f11caef29da0e3a18b2f684c8" exitCode=0 Sep 29 19:15:11 crc kubenswrapper[4792]: I0929 19:15:11.135051 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hr5x7" event={"ID":"f52c7bda-8340-49b2-9f94-2767ab141a81","Type":"ContainerDied","Data":"5deab9e68e296346f4cb638c1c76c1e575af121f11caef29da0e3a18b2f684c8"} Sep 29 19:15:11 crc kubenswrapper[4792]: I0929 19:15:11.136648 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-9mkf6" event={"ID":"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e","Type":"ContainerStarted","Data":"47d23d282f55a650238ec179a88ac2a12f171cba8507047881677a14138723a2"} Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.163169 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"4eeac79b8078d98598dda7ba214cb738760b2ec673785dcfa9e2a52f1110549e"} Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.163408 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"36c9218397858218812a79c2b1f44da756b778a1b982ba8f1df6791a2150caa6"} Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.163418 4792 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"3d3a9d335690827b652cd0e282d152b983a3421ddabb4789d89d0584d4220f73"} Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.163426 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"b51cbadfeaad72ba1d665fb62469a38685c04172afe6dca6f3afad90baf966be"} Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.163435 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"8b931f88d9d035c938ea18fd774d084e64c310a6c8807853cc6784b3febd2948"} Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.606530 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-hr5x7" Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.669384 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-config-data\") pod \"f52c7bda-8340-49b2-9f94-2767ab141a81\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.669505 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-combined-ca-bundle\") pod \"f52c7bda-8340-49b2-9f94-2767ab141a81\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.669614 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5b9z\" (UniqueName: \"kubernetes.io/projected/f52c7bda-8340-49b2-9f94-2767ab141a81-kube-api-access-p5b9z\") pod \"f52c7bda-8340-49b2-9f94-2767ab141a81\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.669630 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-db-sync-config-data\") pod \"f52c7bda-8340-49b2-9f94-2767ab141a81\" (UID: \"f52c7bda-8340-49b2-9f94-2767ab141a81\") " Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.677179 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f52c7bda-8340-49b2-9f94-2767ab141a81-kube-api-access-p5b9z" (OuterVolumeSpecName: "kube-api-access-p5b9z") pod "f52c7bda-8340-49b2-9f94-2767ab141a81" (UID: "f52c7bda-8340-49b2-9f94-2767ab141a81"). InnerVolumeSpecName "kube-api-access-p5b9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.677976 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f52c7bda-8340-49b2-9f94-2767ab141a81" (UID: "f52c7bda-8340-49b2-9f94-2767ab141a81"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.701930 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f52c7bda-8340-49b2-9f94-2767ab141a81" (UID: "f52c7bda-8340-49b2-9f94-2767ab141a81"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.715632 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-config-data" (OuterVolumeSpecName: "config-data") pod "f52c7bda-8340-49b2-9f94-2767ab141a81" (UID: "f52c7bda-8340-49b2-9f94-2767ab141a81"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.772370 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5b9z\" (UniqueName: \"kubernetes.io/projected/f52c7bda-8340-49b2-9f94-2767ab141a81-kube-api-access-p5b9z\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.772406 4792 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.772445 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:12 crc kubenswrapper[4792]: I0929 19:15:12.772456 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52c7bda-8340-49b2-9f94-2767ab141a81-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.194825 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"21469c19909ba9871f4c881a16664e57a0d980c13ed72b7ae268374f56b5caad"} Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.195057 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cd33a904-c32b-4781-b3fe-53d903764497","Type":"ContainerStarted","Data":"e8a138c4ad7afe0e2895b1f4a427dc44c15752433c2fb4217491965acc8d3319"} Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.200229 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hr5x7" event={"ID":"f52c7bda-8340-49b2-9f94-2767ab141a81","Type":"ContainerDied","Data":"44ac9356848c540300c27ef07e2e8e27fb43849bcab55a5c90623aeddce9afcc"} Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.200267 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44ac9356848c540300c27ef07e2e8e27fb43849bcab55a5c90623aeddce9afcc" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.200551 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hr5x7" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.525026 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.782846292 podStartE2EDuration="42.524990802s" podCreationTimestamp="2025-09-29 19:14:31 +0000 UTC" firstStartedPulling="2025-09-29 19:15:05.426126181 +0000 UTC m=+1117.419433577" lastFinishedPulling="2025-09-29 19:15:11.168270691 +0000 UTC m=+1123.161578087" observedRunningTime="2025-09-29 19:15:13.244891852 +0000 UTC m=+1125.238199288" watchObservedRunningTime="2025-09-29 19:15:13.524990802 +0000 UTC m=+1125.518298198" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.526192 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-nmvvz"] Sep 29 19:15:13 crc kubenswrapper[4792]: E0929 19:15:13.528205 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f52c7bda-8340-49b2-9f94-2767ab141a81" containerName="glance-db-sync" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.528225 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f52c7bda-8340-49b2-9f94-2767ab141a81" containerName="glance-db-sync" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.528429 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f52c7bda-8340-49b2-9f94-2767ab141a81" containerName="glance-db-sync" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.529488 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.547013 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-nmvvz"] Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.590400 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-dns-svc\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.590448 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-config\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.590558 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqwt5\" (UniqueName: \"kubernetes.io/projected/1275aff8-0d26-4364-87aa-ce06b8944db1-kube-api-access-bqwt5\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.590596 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.590617 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.666030 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-nmvvz"] Sep 29 19:15:13 crc kubenswrapper[4792]: E0929 19:15:13.666385 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-bqwt5 ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" podUID="1275aff8-0d26-4364-87aa-ce06b8944db1" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.692274 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqwt5\" (UniqueName: \"kubernetes.io/projected/1275aff8-0d26-4364-87aa-ce06b8944db1-kube-api-access-bqwt5\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.692320 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.692347 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.692373 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-dns-svc\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.692393 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-config\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.693161 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-config\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.693552 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.693735 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.694419 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-dns-svc\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.738043 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-lwxwr"] Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.738727 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqwt5\" (UniqueName: \"kubernetes.io/projected/1275aff8-0d26-4364-87aa-ce06b8944db1-kube-api-access-bqwt5\") pod \"dnsmasq-dns-74dc88fc-nmvvz\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.739793 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.744770 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.761087 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-lwxwr"] Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.795120 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-config\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.795165 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.795198 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.795249 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.795297 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.795313 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhfvg\" (UniqueName: \"kubernetes.io/projected/3c6ec0a6-e0bd-4183-9730-2a420004fcae-kube-api-access-rhfvg\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.896434 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-config\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.896708 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.896741 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.897475 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-config\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.897536 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.897640 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.897750 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.897770 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhfvg\" (UniqueName: 
\"kubernetes.io/projected/3c6ec0a6-e0bd-4183-9730-2a420004fcae-kube-api-access-rhfvg\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.898110 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.898642 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.898666 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:13 crc kubenswrapper[4792]: I0929 19:15:13.919741 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhfvg\" (UniqueName: \"kubernetes.io/projected/3c6ec0a6-e0bd-4183-9730-2a420004fcae-kube-api-access-rhfvg\") pod \"dnsmasq-dns-5f59b8f679-lwxwr\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.084294 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.205767 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.219630 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.303199 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-sb\") pod \"1275aff8-0d26-4364-87aa-ce06b8944db1\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.303351 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqwt5\" (UniqueName: \"kubernetes.io/projected/1275aff8-0d26-4364-87aa-ce06b8944db1-kube-api-access-bqwt5\") pod \"1275aff8-0d26-4364-87aa-ce06b8944db1\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.303456 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-dns-svc\") pod \"1275aff8-0d26-4364-87aa-ce06b8944db1\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.303496 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-config\") pod \"1275aff8-0d26-4364-87aa-ce06b8944db1\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.303544 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-nb\") pod \"1275aff8-0d26-4364-87aa-ce06b8944db1\" (UID: \"1275aff8-0d26-4364-87aa-ce06b8944db1\") " Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.303724 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1275aff8-0d26-4364-87aa-ce06b8944db1" (UID: "1275aff8-0d26-4364-87aa-ce06b8944db1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.304132 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1275aff8-0d26-4364-87aa-ce06b8944db1" (UID: "1275aff8-0d26-4364-87aa-ce06b8944db1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.304167 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-config" (OuterVolumeSpecName: "config") pod "1275aff8-0d26-4364-87aa-ce06b8944db1" (UID: "1275aff8-0d26-4364-87aa-ce06b8944db1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.304216 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1275aff8-0d26-4364-87aa-ce06b8944db1" (UID: "1275aff8-0d26-4364-87aa-ce06b8944db1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.304606 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.304650 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.304685 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.304699 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1275aff8-0d26-4364-87aa-ce06b8944db1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.309301 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1275aff8-0d26-4364-87aa-ce06b8944db1-kube-api-access-bqwt5" (OuterVolumeSpecName: "kube-api-access-bqwt5") pod "1275aff8-0d26-4364-87aa-ce06b8944db1" (UID: "1275aff8-0d26-4364-87aa-ce06b8944db1"). InnerVolumeSpecName "kube-api-access-bqwt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:14 crc kubenswrapper[4792]: I0929 19:15:14.406197 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqwt5\" (UniqueName: \"kubernetes.io/projected/1275aff8-0d26-4364-87aa-ce06b8944db1-kube-api-access-bqwt5\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:15 crc kubenswrapper[4792]: I0929 19:15:15.212704 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-nmvvz" Sep 29 19:15:15 crc kubenswrapper[4792]: I0929 19:15:15.266847 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-nmvvz"] Sep 29 19:15:15 crc kubenswrapper[4792]: I0929 19:15:15.280163 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-nmvvz"] Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.097772 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-9d5d-account-create-4l8xs"] Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.099331 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-9d5d-account-create-4l8xs" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.103064 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.109557 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-9d5d-account-create-4l8xs"] Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.137276 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmqc9\" (UniqueName: \"kubernetes.io/projected/305096ef-8cf0-4061-8153-17d2bbcb9e2b-kube-api-access-pmqc9\") pod \"barbican-9d5d-account-create-4l8xs\" (UID: \"305096ef-8cf0-4061-8153-17d2bbcb9e2b\") " pod="openstack/barbican-9d5d-account-create-4l8xs" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.239458 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmqc9\" (UniqueName: \"kubernetes.io/projected/305096ef-8cf0-4061-8153-17d2bbcb9e2b-kube-api-access-pmqc9\") pod \"barbican-9d5d-account-create-4l8xs\" (UID: \"305096ef-8cf0-4061-8153-17d2bbcb9e2b\") " pod="openstack/barbican-9d5d-account-create-4l8xs" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.256942 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmqc9\" (UniqueName: \"kubernetes.io/projected/305096ef-8cf0-4061-8153-17d2bbcb9e2b-kube-api-access-pmqc9\") pod \"barbican-9d5d-account-create-4l8xs\" (UID: \"305096ef-8cf0-4061-8153-17d2bbcb9e2b\") " pod="openstack/barbican-9d5d-account-create-4l8xs" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.290779 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-1bfc-account-create-dfjx4"] Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.291790 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1bfc-account-create-dfjx4" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.296177 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.339836 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1bfc-account-create-dfjx4"] Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.432363 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9d5d-account-create-4l8xs" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.442650 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr4jk\" (UniqueName: \"kubernetes.io/projected/43a6587c-7796-4d5d-9f91-cff1dc257b26-kube-api-access-nr4jk\") pod \"cinder-1bfc-account-create-dfjx4\" (UID: \"43a6587c-7796-4d5d-9f91-cff1dc257b26\") " pod="openstack/cinder-1bfc-account-create-dfjx4" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.495328 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-2e1f-account-create-zc864"] Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.501048 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-2e1f-account-create-zc864" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.504635 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-2e1f-account-create-zc864"] Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.509555 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.545658 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr4jk\" (UniqueName: \"kubernetes.io/projected/43a6587c-7796-4d5d-9f91-cff1dc257b26-kube-api-access-nr4jk\") pod \"cinder-1bfc-account-create-dfjx4\" (UID: \"43a6587c-7796-4d5d-9f91-cff1dc257b26\") " pod="openstack/cinder-1bfc-account-create-dfjx4" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.566602 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr4jk\" (UniqueName: \"kubernetes.io/projected/43a6587c-7796-4d5d-9f91-cff1dc257b26-kube-api-access-nr4jk\") pod \"cinder-1bfc-account-create-dfjx4\" (UID: \"43a6587c-7796-4d5d-9f91-cff1dc257b26\") " pod="openstack/cinder-1bfc-account-create-dfjx4" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.648651 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx9kk\" (UniqueName: \"kubernetes.io/projected/9d341539-6c17-493b-bd37-e03f9e186459-kube-api-access-dx9kk\") pod \"neutron-2e1f-account-create-zc864\" (UID: \"9d341539-6c17-493b-bd37-e03f9e186459\") " pod="openstack/neutron-2e1f-account-create-zc864" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.660604 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1bfc-account-create-dfjx4" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.750983 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx9kk\" (UniqueName: \"kubernetes.io/projected/9d341539-6c17-493b-bd37-e03f9e186459-kube-api-access-dx9kk\") pod \"neutron-2e1f-account-create-zc864\" (UID: \"9d341539-6c17-493b-bd37-e03f9e186459\") " pod="openstack/neutron-2e1f-account-create-zc864" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.770549 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx9kk\" (UniqueName: \"kubernetes.io/projected/9d341539-6c17-493b-bd37-e03f9e186459-kube-api-access-dx9kk\") pod \"neutron-2e1f-account-create-zc864\" (UID: \"9d341539-6c17-493b-bd37-e03f9e186459\") " pod="openstack/neutron-2e1f-account-create-zc864" Sep 29 19:15:16 crc kubenswrapper[4792]: I0929 19:15:16.927697 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-2e1f-account-create-zc864" Sep 29 19:15:17 crc kubenswrapper[4792]: I0929 19:15:17.039106 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1275aff8-0d26-4364-87aa-ce06b8944db1" path="/var/lib/kubelet/pods/1275aff8-0d26-4364-87aa-ce06b8944db1/volumes" Sep 29 19:15:17 crc kubenswrapper[4792]: I0929 19:15:17.039449 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-lwxwr"] Sep 29 19:15:17 crc kubenswrapper[4792]: I0929 19:15:17.138468 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-9d5d-account-create-4l8xs"] Sep 29 19:15:17 crc kubenswrapper[4792]: W0929 19:15:17.140922 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod305096ef_8cf0_4061_8153_17d2bbcb9e2b.slice/crio-21c5445ea8f6e409c7e22c34cb0e650fb7e46e358f3c9959e5d086f8271891fd WatchSource:0}: Error finding container 21c5445ea8f6e409c7e22c34cb0e650fb7e46e358f3c9959e5d086f8271891fd: Status 404 returned error can't find the container with id 21c5445ea8f6e409c7e22c34cb0e650fb7e46e358f3c9959e5d086f8271891fd Sep 29 19:15:17 crc kubenswrapper[4792]: I0929 19:15:17.192295 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1bfc-account-create-dfjx4"] Sep 29 19:15:17 crc kubenswrapper[4792]: I0929 19:15:17.239958 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9d5d-account-create-4l8xs" event={"ID":"305096ef-8cf0-4061-8153-17d2bbcb9e2b","Type":"ContainerStarted","Data":"21c5445ea8f6e409c7e22c34cb0e650fb7e46e358f3c9959e5d086f8271891fd"} Sep 29 19:15:17 crc kubenswrapper[4792]: I0929 19:15:17.241623 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" event={"ID":"3c6ec0a6-e0bd-4183-9730-2a420004fcae","Type":"ContainerStarted","Data":"2cc6be9576da7b94bbb5937c47a05a178db8e1440d3cd3be3acf0818c2bdbe47"} Sep 29 19:15:17 crc kubenswrapper[4792]: I0929 19:15:17.244798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-9mkf6" event={"ID":"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e","Type":"ContainerStarted","Data":"574e87433c33c96465cfa0f0b6655078ed2e11e589a1246263fbe17665512d46"} Sep 29 19:15:17 crc kubenswrapper[4792]: I0929 19:15:17.253913 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1bfc-account-create-dfjx4" event={"ID":"43a6587c-7796-4d5d-9f91-cff1dc257b26","Type":"ContainerStarted","Data":"9fa91b9f9920e808bea1addb9250fb37e28b9e06c0834a68437cbe0e80c3cd2d"} Sep 29 19:15:17 crc kubenswrapper[4792]: I0929 19:15:17.304405 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-9mkf6" podStartSLOduration=2.171518108 podStartE2EDuration="8.304379536s" podCreationTimestamp="2025-09-29 19:15:09 +0000 UTC" firstStartedPulling="2025-09-29 19:15:10.416094101 +0000 UTC m=+1122.409401517" lastFinishedPulling="2025-09-29 19:15:16.548955549 +0000 UTC m=+1128.542262945" observedRunningTime="2025-09-29 19:15:17.271382406 +0000 UTC m=+1129.264689802" watchObservedRunningTime="2025-09-29 19:15:17.304379536 +0000 UTC m=+1129.297686932" Sep 29 19:15:18 crc kubenswrapper[4792]: I0929 19:15:18.010041 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-2e1f-account-create-zc864"] Sep 29 19:15:18 crc kubenswrapper[4792]: W0929 19:15:18.016138 4792 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d341539_6c17_493b_bd37_e03f9e186459.slice/crio-e32d76c96c528da18a61b5b1cf85db047cc77050d57937e73e15cc11706a249b WatchSource:0}: Error finding container e32d76c96c528da18a61b5b1cf85db047cc77050d57937e73e15cc11706a249b: Status 404 returned error can't find the container with id e32d76c96c528da18a61b5b1cf85db047cc77050d57937e73e15cc11706a249b
Sep 29 19:15:18 crc kubenswrapper[4792]: I0929 19:15:18.266779 4792 generic.go:334] "Generic (PLEG): container finished" podID="9d341539-6c17-493b-bd37-e03f9e186459" containerID="e9c8879923b9d4a0ef247f86c02262810d140dbd1dfc395d1a47142fdc899868" exitCode=0
Sep 29 19:15:18 crc kubenswrapper[4792]: I0929 19:15:18.266885 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2e1f-account-create-zc864" event={"ID":"9d341539-6c17-493b-bd37-e03f9e186459","Type":"ContainerDied","Data":"e9c8879923b9d4a0ef247f86c02262810d140dbd1dfc395d1a47142fdc899868"}
Sep 29 19:15:18 crc kubenswrapper[4792]: I0929 19:15:18.267123 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2e1f-account-create-zc864" event={"ID":"9d341539-6c17-493b-bd37-e03f9e186459","Type":"ContainerStarted","Data":"e32d76c96c528da18a61b5b1cf85db047cc77050d57937e73e15cc11706a249b"}
Sep 29 19:15:18 crc kubenswrapper[4792]: I0929 19:15:18.279899 4792 generic.go:334] "Generic (PLEG): container finished" podID="305096ef-8cf0-4061-8153-17d2bbcb9e2b" containerID="ac5855c219ce190769c0ea0b9d071a413c64e2c845189e35bf197ccbdb65ccf1" exitCode=0
Sep 29 19:15:18 crc kubenswrapper[4792]: I0929 19:15:18.281819 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9d5d-account-create-4l8xs" event={"ID":"305096ef-8cf0-4061-8153-17d2bbcb9e2b","Type":"ContainerDied","Data":"ac5855c219ce190769c0ea0b9d071a413c64e2c845189e35bf197ccbdb65ccf1"}
Sep 29 19:15:18 crc kubenswrapper[4792]: I0929 19:15:18.293615 4792 generic.go:334] "Generic (PLEG): container finished" podID="3c6ec0a6-e0bd-4183-9730-2a420004fcae" containerID="db32ad89f74f89be92052d940e995f43f6bbdea5b643bac23fdb00b57e0b73cd" exitCode=0
Sep 29 19:15:18 crc kubenswrapper[4792]: I0929 19:15:18.293690 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" event={"ID":"3c6ec0a6-e0bd-4183-9730-2a420004fcae","Type":"ContainerDied","Data":"db32ad89f74f89be92052d940e995f43f6bbdea5b643bac23fdb00b57e0b73cd"}
Sep 29 19:15:18 crc kubenswrapper[4792]: I0929 19:15:18.297288 4792 generic.go:334] "Generic (PLEG): container finished" podID="43a6587c-7796-4d5d-9f91-cff1dc257b26" containerID="87f8eb721d8b61854fb6eb07a02c7a138769598716b60771d768cb9b3442fcb6" exitCode=0
Sep 29 19:15:18 crc kubenswrapper[4792]: I0929 19:15:18.297364 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1bfc-account-create-dfjx4" event={"ID":"43a6587c-7796-4d5d-9f91-cff1dc257b26","Type":"ContainerDied","Data":"87f8eb721d8b61854fb6eb07a02c7a138769598716b60771d768cb9b3442fcb6"}
Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.310040 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" event={"ID":"3c6ec0a6-e0bd-4183-9730-2a420004fcae","Type":"ContainerStarted","Data":"445bba3cfb494ada92f61028b400aab7f98cea6e03fce8c20bbc624a04a73c55"}
Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.338825 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" podStartSLOduration=6.338802012 podStartE2EDuration="6.338802012s" podCreationTimestamp="2025-09-29 19:15:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:15:19.334339153 +0000 UTC m=+1131.327646569" watchObservedRunningTime="2025-09-29 19:15:19.338802012 +0000 UTC m=+1131.332109408"
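
Unlike the swift-storage-0 record at 19:15:13.525026, this one logs both pull timestamps as "0001-01-01 00:00:00 +0000 UTC", which is Go's zero time.Time: no image pull happened here (the image was already present), so the pull window is empty and podStartSLOduration equals podStartE2EDuration exactly (6.338802012s). A quick check that the logged sentinel really is the zero time:

```go
// Check that the pull-timestamp sentinel logged above is Go's zero time,
// i.e. the tracker never recorded an image pull for this pod.
package main

import (
	"fmt"
	"time"
)

func main() {
	t, err := time.Parse("2006-01-02 15:04:05 -0700 MST", "0001-01-01 00:00:00 +0000 UTC")
	fmt.Println(t.IsZero(), err) // true <nil>
}
```
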
podStartE2EDuration="6.338802012s" podCreationTimestamp="2025-09-29 19:15:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:15:19.334339153 +0000 UTC m=+1131.327646569" watchObservedRunningTime="2025-09-29 19:15:19.338802012 +0000 UTC m=+1131.332109408" Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.779144 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1bfc-account-create-dfjx4" Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.790191 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2e1f-account-create-zc864" Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.799431 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9d5d-account-create-4l8xs" Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.805064 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nr4jk\" (UniqueName: \"kubernetes.io/projected/43a6587c-7796-4d5d-9f91-cff1dc257b26-kube-api-access-nr4jk\") pod \"43a6587c-7796-4d5d-9f91-cff1dc257b26\" (UID: \"43a6587c-7796-4d5d-9f91-cff1dc257b26\") " Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.805162 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dx9kk\" (UniqueName: \"kubernetes.io/projected/9d341539-6c17-493b-bd37-e03f9e186459-kube-api-access-dx9kk\") pod \"9d341539-6c17-493b-bd37-e03f9e186459\" (UID: \"9d341539-6c17-493b-bd37-e03f9e186459\") " Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.805222 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmqc9\" (UniqueName: \"kubernetes.io/projected/305096ef-8cf0-4061-8153-17d2bbcb9e2b-kube-api-access-pmqc9\") pod \"305096ef-8cf0-4061-8153-17d2bbcb9e2b\" (UID: \"305096ef-8cf0-4061-8153-17d2bbcb9e2b\") " Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.811096 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43a6587c-7796-4d5d-9f91-cff1dc257b26-kube-api-access-nr4jk" (OuterVolumeSpecName: "kube-api-access-nr4jk") pod "43a6587c-7796-4d5d-9f91-cff1dc257b26" (UID: "43a6587c-7796-4d5d-9f91-cff1dc257b26"). InnerVolumeSpecName "kube-api-access-nr4jk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.815758 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d341539-6c17-493b-bd37-e03f9e186459-kube-api-access-dx9kk" (OuterVolumeSpecName: "kube-api-access-dx9kk") pod "9d341539-6c17-493b-bd37-e03f9e186459" (UID: "9d341539-6c17-493b-bd37-e03f9e186459"). InnerVolumeSpecName "kube-api-access-dx9kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.832494 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/305096ef-8cf0-4061-8153-17d2bbcb9e2b-kube-api-access-pmqc9" (OuterVolumeSpecName: "kube-api-access-pmqc9") pod "305096ef-8cf0-4061-8153-17d2bbcb9e2b" (UID: "305096ef-8cf0-4061-8153-17d2bbcb9e2b"). InnerVolumeSpecName "kube-api-access-pmqc9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.907619 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nr4jk\" (UniqueName: \"kubernetes.io/projected/43a6587c-7796-4d5d-9f91-cff1dc257b26-kube-api-access-nr4jk\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.907650 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dx9kk\" (UniqueName: \"kubernetes.io/projected/9d341539-6c17-493b-bd37-e03f9e186459-kube-api-access-dx9kk\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:19 crc kubenswrapper[4792]: I0929 19:15:19.907659 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmqc9\" (UniqueName: \"kubernetes.io/projected/305096ef-8cf0-4061-8153-17d2bbcb9e2b-kube-api-access-pmqc9\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.318977 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9d5d-account-create-4l8xs" event={"ID":"305096ef-8cf0-4061-8153-17d2bbcb9e2b","Type":"ContainerDied","Data":"21c5445ea8f6e409c7e22c34cb0e650fb7e46e358f3c9959e5d086f8271891fd"} Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.319218 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21c5445ea8f6e409c7e22c34cb0e650fb7e46e358f3c9959e5d086f8271891fd" Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.319032 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9d5d-account-create-4l8xs" Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.320925 4792 generic.go:334] "Generic (PLEG): container finished" podID="ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e" containerID="574e87433c33c96465cfa0f0b6655078ed2e11e589a1246263fbe17665512d46" exitCode=0 Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.320999 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-9mkf6" event={"ID":"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e","Type":"ContainerDied","Data":"574e87433c33c96465cfa0f0b6655078ed2e11e589a1246263fbe17665512d46"} Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.323648 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1bfc-account-create-dfjx4" event={"ID":"43a6587c-7796-4d5d-9f91-cff1dc257b26","Type":"ContainerDied","Data":"9fa91b9f9920e808bea1addb9250fb37e28b9e06c0834a68437cbe0e80c3cd2d"} Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.323687 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9fa91b9f9920e808bea1addb9250fb37e28b9e06c0834a68437cbe0e80c3cd2d" Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.323750 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1bfc-account-create-dfjx4" Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.326289 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-2e1f-account-create-zc864" Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.326336 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2e1f-account-create-zc864" event={"ID":"9d341539-6c17-493b-bd37-e03f9e186459","Type":"ContainerDied","Data":"e32d76c96c528da18a61b5b1cf85db047cc77050d57937e73e15cc11706a249b"} Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.326363 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e32d76c96c528da18a61b5b1cf85db047cc77050d57937e73e15cc11706a249b" Sep 29 19:15:20 crc kubenswrapper[4792]: I0929 19:15:20.326400 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:21 crc kubenswrapper[4792]: I0929 19:15:21.731004 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:21 crc kubenswrapper[4792]: I0929 19:15:21.838711 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-combined-ca-bundle\") pod \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " Sep 29 19:15:21 crc kubenswrapper[4792]: I0929 19:15:21.838905 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5g6q\" (UniqueName: \"kubernetes.io/projected/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-kube-api-access-d5g6q\") pod \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " Sep 29 19:15:21 crc kubenswrapper[4792]: I0929 19:15:21.838947 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-config-data\") pod \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\" (UID: \"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e\") " Sep 29 19:15:21 crc kubenswrapper[4792]: I0929 19:15:21.857115 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-kube-api-access-d5g6q" (OuterVolumeSpecName: "kube-api-access-d5g6q") pod "ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e" (UID: "ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e"). InnerVolumeSpecName "kube-api-access-d5g6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:21 crc kubenswrapper[4792]: I0929 19:15:21.888023 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e" (UID: "ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:21 crc kubenswrapper[4792]: I0929 19:15:21.897764 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-config-data" (OuterVolumeSpecName: "config-data") pod "ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e" (UID: "ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:21 crc kubenswrapper[4792]: I0929 19:15:21.940210 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5g6q\" (UniqueName: \"kubernetes.io/projected/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-kube-api-access-d5g6q\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:21 crc kubenswrapper[4792]: I0929 19:15:21.940434 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:21 crc kubenswrapper[4792]: I0929 19:15:21.940487 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.341337 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-9mkf6" event={"ID":"ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e","Type":"ContainerDied","Data":"47d23d282f55a650238ec179a88ac2a12f171cba8507047881677a14138723a2"} Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.341711 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47d23d282f55a650238ec179a88ac2a12f171cba8507047881677a14138723a2" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.341391 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-9mkf6" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.586224 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-lwxwr"] Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.586526 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" podUID="3c6ec0a6-e0bd-4183-9730-2a420004fcae" containerName="dnsmasq-dns" containerID="cri-o://445bba3cfb494ada92f61028b400aab7f98cea6e03fce8c20bbc624a04a73c55" gracePeriod=10 Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.641487 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-5dhfl"] Sep 29 19:15:22 crc kubenswrapper[4792]: E0929 19:15:22.641945 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="305096ef-8cf0-4061-8153-17d2bbcb9e2b" containerName="mariadb-account-create" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.641970 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="305096ef-8cf0-4061-8153-17d2bbcb9e2b" containerName="mariadb-account-create" Sep 29 19:15:22 crc kubenswrapper[4792]: E0929 19:15:22.641988 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d341539-6c17-493b-bd37-e03f9e186459" containerName="mariadb-account-create" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.641996 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d341539-6c17-493b-bd37-e03f9e186459" containerName="mariadb-account-create" Sep 29 19:15:22 crc kubenswrapper[4792]: E0929 19:15:22.642006 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e" containerName="keystone-db-sync" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.642013 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e" containerName="keystone-db-sync" Sep 29 19:15:22 crc kubenswrapper[4792]: E0929 19:15:22.642033 4792 
Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.643014 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5dhfl"
Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.647239 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-58chc"
Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.647304 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.647447 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.650174 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-config-data\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl"
Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.650299 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.650305 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-scripts\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl"
Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.650392 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-combined-ca-bundle\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl"
Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.650430 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-fernet-keys\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " 
pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.650562 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-credential-keys\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.650605 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnmv2\" (UniqueName: \"kubernetes.io/projected/e5b356b4-9ba6-4971-966f-be9d4160b57d-kube-api-access-vnmv2\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.655607 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-sz2kd"] Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.669691 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.700982 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5dhfl"] Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.752695 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.752762 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-scripts\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.752788 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-combined-ca-bundle\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.752813 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-config\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.752892 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-fernet-keys\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.752932 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-svc\") pod 
\"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.752952 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.752984 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-credential-keys\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.753001 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kvvt\" (UniqueName: \"kubernetes.io/projected/f9c73d3d-cf62-4d9c-8710-31fab6a99650-kube-api-access-2kvvt\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.753021 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnmv2\" (UniqueName: \"kubernetes.io/projected/e5b356b4-9ba6-4971-966f-be9d4160b57d-kube-api-access-vnmv2\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.753048 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-config-data\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.753066 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.772985 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-sz2kd"] Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.773630 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-config-data\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.783247 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-scripts\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.786822 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-combined-ca-bundle\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.788199 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-credential-keys\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.790622 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-fernet-keys\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.795248 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnmv2\" (UniqueName: \"kubernetes.io/projected/e5b356b4-9ba6-4971-966f-be9d4160b57d-kube-api-access-vnmv2\") pod \"keystone-bootstrap-5dhfl\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.854026 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kvvt\" (UniqueName: \"kubernetes.io/projected/f9c73d3d-cf62-4d9c-8710-31fab6a99650-kube-api-access-2kvvt\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.854091 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.854142 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.854166 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-config\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.854219 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.854238 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-nb\") pod 
\"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.855415 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.855444 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.855471 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-config\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.856044 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.856340 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.919506 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kvvt\" (UniqueName: \"kubernetes.io/projected/f9c73d3d-cf62-4d9c-8710-31fab6a99650-kube-api-access-2kvvt\") pod \"dnsmasq-dns-bbf5cc879-sz2kd\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.927585 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-j6jb8"] Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.928611 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:22 crc kubenswrapper[4792]: W0929 19:15:22.933314 4792 reflector.go:561] object-"openstack"/"neutron-httpd-config": failed to list *v1.Secret: secrets "neutron-httpd-config" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Sep 29 19:15:22 crc kubenswrapper[4792]: E0929 19:15:22.933365 4792 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"neutron-httpd-config\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"neutron-httpd-config\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 19:15:22 crc kubenswrapper[4792]: W0929 19:15:22.933406 4792 reflector.go:561] object-"openstack"/"neutron-config": failed to list *v1.Secret: secrets "neutron-config" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Sep 29 19:15:22 crc kubenswrapper[4792]: E0929 19:15:22.933419 4792 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"neutron-config\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"neutron-config\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.957577 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-config\") pod \"neutron-db-sync-j6jb8\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.957663 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-combined-ca-bundle\") pod \"neutron-db-sync-j6jb8\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.957709 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hldjl\" (UniqueName: \"kubernetes.io/projected/72bc741a-4542-48fb-b65c-c7a12570d80a-kube-api-access-hldjl\") pod \"neutron-db-sync-j6jb8\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:22 crc kubenswrapper[4792]: W0929 19:15:22.957907 4792 reflector.go:561] object-"openstack"/"neutron-neutron-dockercfg-jk6r4": failed to list *v1.Secret: secrets "neutron-neutron-dockercfg-jk6r4" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Sep 29 19:15:22 crc kubenswrapper[4792]: E0929 19:15:22.957938 4792 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"neutron-neutron-dockercfg-jk6r4\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"neutron-neutron-dockercfg-jk6r4\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in 
API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.972613 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-j6jb8"] Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.986119 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-cvlgm"] Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.987126 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.992073 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.992266 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 19:15:22 crc kubenswrapper[4792]: I0929 19:15:22.992441 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-xqr5j" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.010431 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7486b4cb6c-fr9s4"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.011990 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.014974 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.015176 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-v42kf" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.015296 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.016195 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.034470 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.042019 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-cvlgm"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.065716 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-config\") pod \"neutron-db-sync-j6jb8\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.065960 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-combined-ca-bundle\") pod \"neutron-db-sync-j6jb8\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.066070 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hldjl\" (UniqueName: \"kubernetes.io/projected/72bc741a-4542-48fb-b65c-c7a12570d80a-kube-api-access-hldjl\") pod \"neutron-db-sync-j6jb8\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.083994 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-combined-ca-bundle\") pod \"neutron-db-sync-j6jb8\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.094915 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7486b4cb6c-fr9s4"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.102365 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hldjl\" (UniqueName: \"kubernetes.io/projected/72bc741a-4542-48fb-b65c-c7a12570d80a-kube-api-access-hldjl\") pod \"neutron-db-sync-j6jb8\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.154579 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.167842 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-config-data\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.167913 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-combined-ca-bundle\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.167938 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9953fe09-a48c-4c74-83a1-9de5e8cec46d-logs\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.167964 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8tnq\" (UniqueName: \"kubernetes.io/projected/9953fe09-a48c-4c74-83a1-9de5e8cec46d-kube-api-access-h8tnq\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.167986 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-etc-machine-id\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.168016 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-scripts\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.168034 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-db-sync-config-data\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.168088 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-config-data\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.168105 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9953fe09-a48c-4c74-83a1-9de5e8cec46d-horizon-secret-key\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: 
\"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.168124 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcl6j\" (UniqueName: \"kubernetes.io/projected/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-kube-api-access-hcl6j\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.168138 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-scripts\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.196093 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.198277 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.211141 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.211326 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.265001 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-5bsn5"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.266200 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271022 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-config-data\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271056 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9953fe09-a48c-4c74-83a1-9de5e8cec46d-horizon-secret-key\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271081 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcl6j\" (UniqueName: \"kubernetes.io/projected/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-kube-api-access-hcl6j\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271098 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-scripts\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271143 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-config-data\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271171 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-combined-ca-bundle\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271192 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9953fe09-a48c-4c74-83a1-9de5e8cec46d-logs\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271214 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8tnq\" (UniqueName: \"kubernetes.io/projected/9953fe09-a48c-4c74-83a1-9de5e8cec46d-kube-api-access-h8tnq\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271237 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-etc-machine-id\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271266 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-scripts\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.271284 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-db-sync-config-data\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.272349 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-config-data\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.281036 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-etc-machine-id\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.281379 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9953fe09-a48c-4c74-83a1-9de5e8cec46d-logs\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 
19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.282014 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-scripts\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.282905 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-pkvl7" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.291361 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-sz2kd"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.294271 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.311458 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-scripts\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.322512 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9953fe09-a48c-4c74-83a1-9de5e8cec46d-horizon-secret-key\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.325577 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-config-data\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.327311 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8tnq\" (UniqueName: \"kubernetes.io/projected/9953fe09-a48c-4c74-83a1-9de5e8cec46d-kube-api-access-h8tnq\") pod \"horizon-7486b4cb6c-fr9s4\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.330989 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcl6j\" (UniqueName: \"kubernetes.io/projected/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-kube-api-access-hcl6j\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.342859 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.358724 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-combined-ca-bundle\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.366608 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-db-sync-config-data\") pod \"cinder-db-sync-cvlgm\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.376290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-scripts\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.376352 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-db-sync-config-data\") pod \"barbican-db-sync-5bsn5\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.376400 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.376435 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-log-httpd\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.376476 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.376495 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxgsk\" (UniqueName: \"kubernetes.io/projected/3c3fc253-fb19-4845-a099-4754b7a55cdb-kube-api-access-fxgsk\") pod \"barbican-db-sync-5bsn5\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.376514 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7bw8\" (UniqueName: \"kubernetes.io/projected/06538688-0bb7-45ae-a249-94ba5c312b2b-kube-api-access-n7bw8\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 
19:15:23.376537 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-config-data\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.376557 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-combined-ca-bundle\") pod \"barbican-db-sync-5bsn5\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.376584 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-run-httpd\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.390907 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.399031 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-5bsn5"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.414529 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-qwkmr"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.415780 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.430144 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.430422 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-5jnkz" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.430777 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.438661 4792 generic.go:334] "Generic (PLEG): container finished" podID="3c6ec0a6-e0bd-4183-9730-2a420004fcae" containerID="445bba3cfb494ada92f61028b400aab7f98cea6e03fce8c20bbc624a04a73c55" exitCode=0 Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.438695 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" event={"ID":"3c6ec0a6-e0bd-4183-9730-2a420004fcae","Type":"ContainerDied","Data":"445bba3cfb494ada92f61028b400aab7f98cea6e03fce8c20bbc624a04a73c55"} Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.467821 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-qwkmr"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.477817 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-config-data\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.481622 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-combined-ca-bundle\") pod \"barbican-db-sync-5bsn5\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.481727 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-run-httpd\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.481790 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-scripts\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.481893 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-db-sync-config-data\") pod \"barbican-db-sync-5bsn5\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.482029 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.482116 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-log-httpd\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.482224 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.482262 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxgsk\" (UniqueName: \"kubernetes.io/projected/3c3fc253-fb19-4845-a099-4754b7a55cdb-kube-api-access-fxgsk\") pod \"barbican-db-sync-5bsn5\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.482304 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7bw8\" (UniqueName: \"kubernetes.io/projected/06538688-0bb7-45ae-a249-94ba5c312b2b-kube-api-access-n7bw8\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.489777 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-log-httpd\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.489981 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-run-httpd\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.501081 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-config-data\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.506343 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-db-sync-config-data\") pod \"barbican-db-sync-5bsn5\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.508316 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-scripts\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.509634 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.510868 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7bw8\" (UniqueName: \"kubernetes.io/projected/06538688-0bb7-45ae-a249-94ba5c312b2b-kube-api-access-n7bw8\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.511991 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.522246 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-combined-ca-bundle\") pod \"barbican-db-sync-5bsn5\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.557443 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxgsk\" (UniqueName: \"kubernetes.io/projected/3c3fc253-fb19-4845-a099-4754b7a55cdb-kube-api-access-fxgsk\") pod \"barbican-db-sync-5bsn5\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.560238 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.589344 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rh9hv"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.590778 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.596407 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-combined-ca-bundle\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.596610 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b45ae86a-64ce-47be-a568-021cf9da5107-logs\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.596706 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.596840 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.597026 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.597150 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8xfx\" (UniqueName: \"kubernetes.io/projected/b45ae86a-64ce-47be-a568-021cf9da5107-kube-api-access-t8xfx\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.597259 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-scripts\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.597380 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stxf8\" (UniqueName: \"kubernetes.io/projected/fc9bf108-a60d-4111-b064-f37789e2d7c1-kube-api-access-stxf8\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: 
\"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.597458 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-config-data\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.597522 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-config\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.597636 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.612100 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.614103 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.630527 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rh9hv"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.663291 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.665180 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.672968 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.673227 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hgxkm" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.673314 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.674124 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.682703 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-79f9946749-wjkgr"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.686626 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.728995 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.729058 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.729134 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8xfx\" (UniqueName: \"kubernetes.io/projected/b45ae86a-64ce-47be-a568-021cf9da5107-kube-api-access-t8xfx\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.729192 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-scripts\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.729251 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stxf8\" (UniqueName: \"kubernetes.io/projected/fc9bf108-a60d-4111-b064-f37789e2d7c1-kube-api-access-stxf8\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.729287 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-config-data\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.729315 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-config\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.729363 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.729410 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-combined-ca-bundle\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc 
kubenswrapper[4792]: I0929 19:15:23.729441 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b45ae86a-64ce-47be-a568-021cf9da5107-logs\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.729457 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.735954 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.742790 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.743455 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.764909 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-config\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.766586 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.768804 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.771926 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b45ae86a-64ce-47be-a568-021cf9da5107-logs\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.776122 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.783136 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stxf8\" (UniqueName: \"kubernetes.io/projected/fc9bf108-a60d-4111-b064-f37789e2d7c1-kube-api-access-stxf8\") pod 
\"dnsmasq-dns-56df8fb6b7-rh9hv\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.790111 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-config-data\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.813052 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8xfx\" (UniqueName: \"kubernetes.io/projected/b45ae86a-64ce-47be-a568-021cf9da5107-kube-api-access-t8xfx\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.825526 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-scripts\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.826053 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-combined-ca-bundle\") pod \"placement-db-sync-qwkmr\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.826078 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-79f9946749-wjkgr"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.835958 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-config\") pod \"neutron-db-sync-j6jb8\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.874756 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsdpg\" (UniqueName: \"kubernetes.io/projected/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-kube-api-access-gsdpg\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.874806 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-logs\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.874870 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.874989 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5hbg\" (UniqueName: 
\"kubernetes.io/projected/32da7a07-6996-432d-a9d3-121a2f952b4c-kube-api-access-h5hbg\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.875023 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-config-data\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.875046 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.875083 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.875124 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.875178 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-scripts\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.875209 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-config-data\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.875247 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-logs\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.875313 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-scripts\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.875343 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-horizon-secret-key\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.949654 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.958663 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5dhfl"] Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.976686 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsdpg\" (UniqueName: \"kubernetes.io/projected/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-kube-api-access-gsdpg\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977013 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-logs\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977039 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977084 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5hbg\" (UniqueName: \"kubernetes.io/projected/32da7a07-6996-432d-a9d3-121a2f952b4c-kube-api-access-h5hbg\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977109 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-config-data\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977129 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977159 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977180 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-public-tls-certs\") pod 
\"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977208 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-scripts\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977228 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-config-data\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977262 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-logs\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977311 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-scripts\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.977331 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-horizon-secret-key\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.979937 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-logs\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.980153 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.985611 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-config-data\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.986400 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-scripts\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.986621 4792 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Sep 29 19:15:23 crc kubenswrapper[4792]: I0929 19:15:23.989133 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-logs\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.011715 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5hbg\" (UniqueName: \"kubernetes.io/projected/32da7a07-6996-432d-a9d3-121a2f952b4c-kube-api-access-h5hbg\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.014237 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-scripts\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.018202 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.022749 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.023706 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-config-data\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.024002 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-horizon-secret-key\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.033158 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.033728 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-jk6r4" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.039395 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.065174 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsdpg\" (UniqueName: \"kubernetes.io/projected/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-kube-api-access-gsdpg\") pod \"horizon-79f9946749-wjkgr\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.078685 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-qwkmr" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.106609 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.158920 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-sz2kd"] Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.183603 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-swift-storage-0\") pod \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.183660 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-sb\") pod \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.183712 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-svc\") pod \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.183771 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-config\") pod \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.183804 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-nb\") pod \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.184537 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhfvg\" (UniqueName: \"kubernetes.io/projected/3c6ec0a6-e0bd-4183-9730-2a420004fcae-kube-api-access-rhfvg\") pod \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\" (UID: \"3c6ec0a6-e0bd-4183-9730-2a420004fcae\") " Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.202986 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.204246 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c6ec0a6-e0bd-4183-9730-2a420004fcae-kube-api-access-rhfvg" (OuterVolumeSpecName: "kube-api-access-rhfvg") pod "3c6ec0a6-e0bd-4183-9730-2a420004fcae" (UID: "3c6ec0a6-e0bd-4183-9730-2a420004fcae"). InnerVolumeSpecName "kube-api-access-rhfvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.206789 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:15:24 crc kubenswrapper[4792]: E0929 19:15:24.207826 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c6ec0a6-e0bd-4183-9730-2a420004fcae" containerName="dnsmasq-dns" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.207841 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c6ec0a6-e0bd-4183-9730-2a420004fcae" containerName="dnsmasq-dns" Sep 29 19:15:24 crc kubenswrapper[4792]: E0929 19:15:24.207860 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c6ec0a6-e0bd-4183-9730-2a420004fcae" containerName="init" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.207866 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c6ec0a6-e0bd-4183-9730-2a420004fcae" containerName="init" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.208035 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c6ec0a6-e0bd-4183-9730-2a420004fcae" containerName="dnsmasq-dns" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.209033 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.214711 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.215123 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.222905 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.229391 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.298766 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwt9g\" (UniqueName: \"kubernetes.io/projected/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-kube-api-access-fwt9g\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.299030 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.299144 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.299224 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-logs\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.299259 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.299290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.299362 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.299384 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.299432 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhfvg\" (UniqueName: \"kubernetes.io/projected/3c6ec0a6-e0bd-4183-9730-2a420004fcae-kube-api-access-rhfvg\") on node \"crc\" 
DevicePath \"\"" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.400342 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.400382 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.400436 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwt9g\" (UniqueName: \"kubernetes.io/projected/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-kube-api-access-fwt9g\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.400453 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.400503 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.400541 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-logs\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.400562 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.400582 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.401063 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.401191 4792 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.402810 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-logs\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.405274 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7486b4cb6c-fr9s4"] Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.415729 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.415915 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.425765 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.433520 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.435644 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwt9g\" (UniqueName: \"kubernetes.io/projected/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-kube-api-access-fwt9g\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.462166 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5dhfl" event={"ID":"e5b356b4-9ba6-4971-966f-be9d4160b57d","Type":"ContainerStarted","Data":"5c2ac36592efd87c9b72d533b940bf070811aa295ce51e0dc083be35063aa9ad"} Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.474924 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" event={"ID":"3c6ec0a6-e0bd-4183-9730-2a420004fcae","Type":"ContainerDied","Data":"2cc6be9576da7b94bbb5937c47a05a178db8e1440d3cd3be3acf0818c2bdbe47"} Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.474972 4792 scope.go:117] "RemoveContainer" 
containerID="445bba3cfb494ada92f61028b400aab7f98cea6e03fce8c20bbc624a04a73c55" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.475155 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-lwxwr" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.482868 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" event={"ID":"f9c73d3d-cf62-4d9c-8710-31fab6a99650","Type":"ContainerStarted","Data":"53ad38637b374f1f6da614b3bd959f511f2999b777417c894a782fd87004f818"} Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.523541 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.538577 4792 scope.go:117] "RemoveContainer" containerID="db32ad89f74f89be92052d940e995f43f6bbdea5b643bac23fdb00b57e0b73cd" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.564082 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3c6ec0a6-e0bd-4183-9730-2a420004fcae" (UID: "3c6ec0a6-e0bd-4183-9730-2a420004fcae"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.593332 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3c6ec0a6-e0bd-4183-9730-2a420004fcae" (UID: "3c6ec0a6-e0bd-4183-9730-2a420004fcae"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.615316 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.615349 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.623648 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3c6ec0a6-e0bd-4183-9730-2a420004fcae" (UID: "3c6ec0a6-e0bd-4183-9730-2a420004fcae"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.628811 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.631865 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-cvlgm"] Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.644482 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3c6ec0a6-e0bd-4183-9730-2a420004fcae" (UID: "3c6ec0a6-e0bd-4183-9730-2a420004fcae"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.647319 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-config" (OuterVolumeSpecName: "config") pod "3c6ec0a6-e0bd-4183-9730-2a420004fcae" (UID: "3c6ec0a6-e0bd-4183-9730-2a420004fcae"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.723906 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.723952 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.723962 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c6ec0a6-e0bd-4183-9730-2a420004fcae-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.844409 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.861801 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-lwxwr"] Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.883460 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-lwxwr"] Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.911112 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:15:24 crc kubenswrapper[4792]: I0929 19:15:24.973961 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-qwkmr"] Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.014727 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-5bsn5"] Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.053512 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c6ec0a6-e0bd-4183-9730-2a420004fcae" path="/var/lib/kubelet/pods/3c6ec0a6-e0bd-4183-9730-2a420004fcae/volumes" Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.382201 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-j6jb8"] Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.416202 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-79f9946749-wjkgr"] Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.456524 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rh9hv"] Sep 29 19:15:25 crc kubenswrapper[4792]: W0929 19:15:25.486930 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc9bf108_a60d_4111_b064_f37789e2d7c1.slice/crio-f0e9707349865bb460b60a1ff263783a2be29ca2beeaef17326ad96c5c93bf56 WatchSource:0}: Error finding container f0e9707349865bb460b60a1ff263783a2be29ca2beeaef17326ad96c5c93bf56: Status 404 returned error can't find the container with id f0e9707349865bb460b60a1ff263783a2be29ca2beeaef17326ad96c5c93bf56 Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.498314 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cvlgm" event={"ID":"bd0405ab-8be9-41cd-aa4d-7cbe44be3049","Type":"ContainerStarted","Data":"ed5d3e8c6df3780d0c885ca3d0c7b56e23a627ba00fb6f6824b6ebcaca94bd85"} Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.510835 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5dhfl" event={"ID":"e5b356b4-9ba6-4971-966f-be9d4160b57d","Type":"ContainerStarted","Data":"d72f423cf06a89a52dbdb61789a4ada9f3ad5c2e26307d79817fdcd764d326aa"} Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.519341 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7486b4cb6c-fr9s4" event={"ID":"9953fe09-a48c-4c74-83a1-9de5e8cec46d","Type":"ContainerStarted","Data":"03d246cacb1bd0e828f9a741d435516977747e91531292610332fb7ca838f15e"} Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.521903 4792 generic.go:334] "Generic (PLEG): container finished" podID="f9c73d3d-cf62-4d9c-8710-31fab6a99650" containerID="9b8bccede1f592b6038557524b0ff09c1339cb22c54a4703266bfd0e173f0496" exitCode=0 Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.522161 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" 
event={"ID":"f9c73d3d-cf62-4d9c-8710-31fab6a99650","Type":"ContainerDied","Data":"9b8bccede1f592b6038557524b0ff09c1339cb22c54a4703266bfd0e173f0496"} Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.539973 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-5dhfl" podStartSLOduration=3.539955633 podStartE2EDuration="3.539955633s" podCreationTimestamp="2025-09-29 19:15:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:15:25.530024298 +0000 UTC m=+1137.523331694" watchObservedRunningTime="2025-09-29 19:15:25.539955633 +0000 UTC m=+1137.533263029" Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.549089 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"06538688-0bb7-45ae-a249-94ba5c312b2b","Type":"ContainerStarted","Data":"47848e75a05b2a9e7c5bd2dda787011c4dd5a3822b0a55c3bf757e889d4ad62d"} Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.557192 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5bsn5" event={"ID":"3c3fc253-fb19-4845-a099-4754b7a55cdb","Type":"ContainerStarted","Data":"ed033cb19fc1608719b10c5e51150efc9caaa419f9253fd09ba1deaac8974f3f"} Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.587510 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79f9946749-wjkgr" event={"ID":"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef","Type":"ContainerStarted","Data":"facaa116af583eb6a28415b5f8bf5fc447fe664a35eedb0491a6effbed125dce"} Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.626819 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j6jb8" event={"ID":"72bc741a-4542-48fb-b65c-c7a12570d80a","Type":"ContainerStarted","Data":"feb60b91ffb62bd44316b44c35cb567325abf31ea93a2e7b09c4c11b43639e30"} Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.643055 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.682608 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-qwkmr" event={"ID":"b45ae86a-64ce-47be-a568-021cf9da5107","Type":"ContainerStarted","Data":"739b1818966f4b471020cf3e4319a81094cd6ecf4857c233adb5432442c0c564"} Sep 29 19:15:25 crc kubenswrapper[4792]: I0929 19:15:25.785022 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.182372 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.315667 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kvvt\" (UniqueName: \"kubernetes.io/projected/f9c73d3d-cf62-4d9c-8710-31fab6a99650-kube-api-access-2kvvt\") pod \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.315829 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-nb\") pod \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.315904 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-sb\") pod \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.315950 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-config\") pod \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.315983 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-svc\") pod \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.316010 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-swift-storage-0\") pod \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\" (UID: \"f9c73d3d-cf62-4d9c-8710-31fab6a99650\") " Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.359658 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9c73d3d-cf62-4d9c-8710-31fab6a99650-kube-api-access-2kvvt" (OuterVolumeSpecName: "kube-api-access-2kvvt") pod "f9c73d3d-cf62-4d9c-8710-31fab6a99650" (UID: "f9c73d3d-cf62-4d9c-8710-31fab6a99650"). InnerVolumeSpecName "kube-api-access-2kvvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.370725 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f9c73d3d-cf62-4d9c-8710-31fab6a99650" (UID: "f9c73d3d-cf62-4d9c-8710-31fab6a99650"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.391989 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-config" (OuterVolumeSpecName: "config") pod "f9c73d3d-cf62-4d9c-8710-31fab6a99650" (UID: "f9c73d3d-cf62-4d9c-8710-31fab6a99650"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.394067 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f9c73d3d-cf62-4d9c-8710-31fab6a99650" (UID: "f9c73d3d-cf62-4d9c-8710-31fab6a99650"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.398371 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f9c73d3d-cf62-4d9c-8710-31fab6a99650" (UID: "f9c73d3d-cf62-4d9c-8710-31fab6a99650"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.402866 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f9c73d3d-cf62-4d9c-8710-31fab6a99650" (UID: "f9c73d3d-cf62-4d9c-8710-31fab6a99650"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.423371 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.423426 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.423439 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.423451 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.423462 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kvvt\" (UniqueName: \"kubernetes.io/projected/f9c73d3d-cf62-4d9c-8710-31fab6a99650-kube-api-access-2kvvt\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.423476 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f9c73d3d-cf62-4d9c-8710-31fab6a99650-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.773388 4792 generic.go:334] "Generic (PLEG): container finished" podID="fc9bf108-a60d-4111-b064-f37789e2d7c1" containerID="0a4ebe62cd782eb9c1393c70cccbc338fd92640d7163d46a149daaaf9db2d5db" exitCode=0 Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.773488 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" event={"ID":"fc9bf108-a60d-4111-b064-f37789e2d7c1","Type":"ContainerDied","Data":"0a4ebe62cd782eb9c1393c70cccbc338fd92640d7163d46a149daaaf9db2d5db"} Sep 29 19:15:26 crc 
kubenswrapper[4792]: I0929 19:15:26.773534 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" event={"ID":"fc9bf108-a60d-4111-b064-f37789e2d7c1","Type":"ContainerStarted","Data":"f0e9707349865bb460b60a1ff263783a2be29ca2beeaef17326ad96c5c93bf56"} Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.808986 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"32da7a07-6996-432d-a9d3-121a2f952b4c","Type":"ContainerStarted","Data":"d5cb41a1b17efeaa36fc38084d713f967eb3cc0508ade4b1039559054963f57c"} Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.840045 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j6jb8" event={"ID":"72bc741a-4542-48fb-b65c-c7a12570d80a","Type":"ContainerStarted","Data":"6a5885959acd64b15fe20cfb3870e826808b5a9916e3c522a51fba99a1bcbcf7"} Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.875912 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" event={"ID":"f9c73d3d-cf62-4d9c-8710-31fab6a99650","Type":"ContainerDied","Data":"53ad38637b374f1f6da614b3bd959f511f2999b777417c894a782fd87004f818"} Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.875961 4792 scope.go:117] "RemoveContainer" containerID="9b8bccede1f592b6038557524b0ff09c1339cb22c54a4703266bfd0e173f0496" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.876070 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-sz2kd" Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.894797 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e93ef914-ef1e-40a1-a7d9-5866f9a6e454","Type":"ContainerStarted","Data":"2abb559e23953eedcb6cb39c43fe6f72d74b27ac8d87c2592ce9ae4e608a691e"} Sep 29 19:15:26 crc kubenswrapper[4792]: I0929 19:15:26.983479 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-j6jb8" podStartSLOduration=4.983454359 podStartE2EDuration="4.983454359s" podCreationTimestamp="2025-09-29 19:15:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:15:26.928930325 +0000 UTC m=+1138.922237731" watchObservedRunningTime="2025-09-29 19:15:26.983454359 +0000 UTC m=+1138.976761745" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.279310 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-sz2kd"] Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.307469 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-sz2kd"] Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.413288 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.688111 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7486b4cb6c-fr9s4"] Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.711774 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.744475 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-575755669c-6b5g8"] Sep 29 19:15:27 crc kubenswrapper[4792]: E0929 19:15:27.745175 4792 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="f9c73d3d-cf62-4d9c-8710-31fab6a99650" containerName="init" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.745191 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c73d3d-cf62-4d9c-8710-31fab6a99650" containerName="init" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.745375 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9c73d3d-cf62-4d9c-8710-31fab6a99650" containerName="init" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.746268 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.779317 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.794969 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-575755669c-6b5g8"] Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.871757 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/03017320-13cd-4880-80e9-0834cb41a6bd-horizon-secret-key\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.871810 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6npsj\" (UniqueName: \"kubernetes.io/projected/03017320-13cd-4880-80e9-0834cb41a6bd-kube-api-access-6npsj\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.871915 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03017320-13cd-4880-80e9-0834cb41a6bd-logs\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.871954 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-scripts\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.871980 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-config-data\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.939721 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"32da7a07-6996-432d-a9d3-121a2f952b4c","Type":"ContainerStarted","Data":"cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2"} Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.947092 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" 
event={"ID":"fc9bf108-a60d-4111-b064-f37789e2d7c1","Type":"ContainerStarted","Data":"ac99206b4580e0813cf9ec4a8332cbb6600a529c76e7de3d16dcd402f5c85615"} Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.947145 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.977760 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03017320-13cd-4880-80e9-0834cb41a6bd-logs\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.977825 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-scripts\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.978344 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03017320-13cd-4880-80e9-0834cb41a6bd-logs\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.980517 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-config-data\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.980808 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/03017320-13cd-4880-80e9-0834cb41a6bd-horizon-secret-key\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.980889 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6npsj\" (UniqueName: \"kubernetes.io/projected/03017320-13cd-4880-80e9-0834cb41a6bd-kube-api-access-6npsj\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.981587 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-config-data\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:27 crc kubenswrapper[4792]: I0929 19:15:27.981729 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" podStartSLOduration=4.9817185120000005 podStartE2EDuration="4.981718512s" podCreationTimestamp="2025-09-29 19:15:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:15:27.977388097 +0000 UTC m=+1139.970695513" watchObservedRunningTime="2025-09-29 19:15:27.981718512 +0000 UTC m=+1139.975025908" Sep 29 19:15:27 crc kubenswrapper[4792]: 
I0929 19:15:27.981946 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-scripts\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:28 crc kubenswrapper[4792]: I0929 19:15:28.001781 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/03017320-13cd-4880-80e9-0834cb41a6bd-horizon-secret-key\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:28 crc kubenswrapper[4792]: I0929 19:15:28.004030 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6npsj\" (UniqueName: \"kubernetes.io/projected/03017320-13cd-4880-80e9-0834cb41a6bd-kube-api-access-6npsj\") pod \"horizon-575755669c-6b5g8\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:28 crc kubenswrapper[4792]: I0929 19:15:28.216735 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:15:28 crc kubenswrapper[4792]: I0929 19:15:28.957005 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-575755669c-6b5g8"] Sep 29 19:15:28 crc kubenswrapper[4792]: I0929 19:15:28.977276 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e93ef914-ef1e-40a1-a7d9-5866f9a6e454","Type":"ContainerStarted","Data":"9ec7c8564a4556dbcf9f7aa440a25c204b9b08c4ad019c1752f20177e725f362"} Sep 29 19:15:29 crc kubenswrapper[4792]: I0929 19:15:29.054246 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9c73d3d-cf62-4d9c-8710-31fab6a99650" path="/var/lib/kubelet/pods/f9c73d3d-cf62-4d9c-8710-31fab6a99650/volumes" Sep 29 19:15:29 crc kubenswrapper[4792]: I0929 19:15:29.580510 4792 scope.go:117] "RemoveContainer" containerID="03401129a7b0b1ffd94aa07f8e244f943742edca57365564fe54787f3dc3337a" Sep 29 19:15:29 crc kubenswrapper[4792]: I0929 19:15:29.987286 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"32da7a07-6996-432d-a9d3-121a2f952b4c","Type":"ContainerStarted","Data":"10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023"} Sep 29 19:15:29 crc kubenswrapper[4792]: I0929 19:15:29.987461 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="32da7a07-6996-432d-a9d3-121a2f952b4c" containerName="glance-log" containerID="cri-o://cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2" gracePeriod=30 Sep 29 19:15:29 crc kubenswrapper[4792]: I0929 19:15:29.987589 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="32da7a07-6996-432d-a9d3-121a2f952b4c" containerName="glance-httpd" containerID="cri-o://10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023" gracePeriod=30 Sep 29 19:15:29 crc kubenswrapper[4792]: I0929 19:15:29.995924 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e93ef914-ef1e-40a1-a7d9-5866f9a6e454","Type":"ContainerStarted","Data":"6eeae45a37103445a6a2f460f1f95dafd6c7ef5bbf1694f76fa12743cf24ee2a"} Sep 29 19:15:29 crc 
kubenswrapper[4792]: I0929 19:15:29.996081 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" containerName="glance-log" containerID="cri-o://9ec7c8564a4556dbcf9f7aa440a25c204b9b08c4ad019c1752f20177e725f362" gracePeriod=30 Sep 29 19:15:29 crc kubenswrapper[4792]: I0929 19:15:29.996304 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" containerName="glance-httpd" containerID="cri-o://6eeae45a37103445a6a2f460f1f95dafd6c7ef5bbf1694f76fa12743cf24ee2a" gracePeriod=30 Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.005724 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-575755669c-6b5g8" event={"ID":"03017320-13cd-4880-80e9-0834cb41a6bd","Type":"ContainerStarted","Data":"a0e637e7475014491d3582c41be596b098448686a624731f816a04e2565cefdd"} Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.045703 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.045688866 podStartE2EDuration="7.045688866s" podCreationTimestamp="2025-09-29 19:15:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:15:30.021928923 +0000 UTC m=+1142.015236329" watchObservedRunningTime="2025-09-29 19:15:30.045688866 +0000 UTC m=+1142.038996262" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.046363 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.046358704 podStartE2EDuration="7.046358704s" podCreationTimestamp="2025-09-29 19:15:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:15:30.044368601 +0000 UTC m=+1142.037675997" watchObservedRunningTime="2025-09-29 19:15:30.046358704 +0000 UTC m=+1142.039666100" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.741272 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.882293 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-logs\") pod \"32da7a07-6996-432d-a9d3-121a2f952b4c\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.882584 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-combined-ca-bundle\") pod \"32da7a07-6996-432d-a9d3-121a2f952b4c\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.882608 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-config-data\") pod \"32da7a07-6996-432d-a9d3-121a2f952b4c\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.882706 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-public-tls-certs\") pod \"32da7a07-6996-432d-a9d3-121a2f952b4c\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.882775 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-httpd-run\") pod \"32da7a07-6996-432d-a9d3-121a2f952b4c\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.882804 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5hbg\" (UniqueName: \"kubernetes.io/projected/32da7a07-6996-432d-a9d3-121a2f952b4c-kube-api-access-h5hbg\") pod \"32da7a07-6996-432d-a9d3-121a2f952b4c\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.882900 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-scripts\") pod \"32da7a07-6996-432d-a9d3-121a2f952b4c\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.882935 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"32da7a07-6996-432d-a9d3-121a2f952b4c\" (UID: \"32da7a07-6996-432d-a9d3-121a2f952b4c\") " Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.891000 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "32da7a07-6996-432d-a9d3-121a2f952b4c" (UID: "32da7a07-6996-432d-a9d3-121a2f952b4c"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.891161 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-logs" (OuterVolumeSpecName: "logs") pod "32da7a07-6996-432d-a9d3-121a2f952b4c" (UID: "32da7a07-6996-432d-a9d3-121a2f952b4c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.894678 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "32da7a07-6996-432d-a9d3-121a2f952b4c" (UID: "32da7a07-6996-432d-a9d3-121a2f952b4c"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.905881 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-scripts" (OuterVolumeSpecName: "scripts") pod "32da7a07-6996-432d-a9d3-121a2f952b4c" (UID: "32da7a07-6996-432d-a9d3-121a2f952b4c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.906814 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32da7a07-6996-432d-a9d3-121a2f952b4c-kube-api-access-h5hbg" (OuterVolumeSpecName: "kube-api-access-h5hbg") pod "32da7a07-6996-432d-a9d3-121a2f952b4c" (UID: "32da7a07-6996-432d-a9d3-121a2f952b4c"). InnerVolumeSpecName "kube-api-access-h5hbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.924922 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32da7a07-6996-432d-a9d3-121a2f952b4c" (UID: "32da7a07-6996-432d-a9d3-121a2f952b4c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.944020 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "32da7a07-6996-432d-a9d3-121a2f952b4c" (UID: "32da7a07-6996-432d-a9d3-121a2f952b4c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.954067 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-config-data" (OuterVolumeSpecName: "config-data") pod "32da7a07-6996-432d-a9d3-121a2f952b4c" (UID: "32da7a07-6996-432d-a9d3-121a2f952b4c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.985474 4792 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.985521 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.985535 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5hbg\" (UniqueName: \"kubernetes.io/projected/32da7a07-6996-432d-a9d3-121a2f952b4c-kube-api-access-h5hbg\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.985548 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.985584 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.985596 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32da7a07-6996-432d-a9d3-121a2f952b4c-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.985648 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:30 crc kubenswrapper[4792]: I0929 19:15:30.985665 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32da7a07-6996-432d-a9d3-121a2f952b4c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.009751 4792 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.035542 4792 generic.go:334] "Generic (PLEG): container finished" podID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" containerID="6eeae45a37103445a6a2f460f1f95dafd6c7ef5bbf1694f76fa12743cf24ee2a" exitCode=0 Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.035574 4792 generic.go:334] "Generic (PLEG): container finished" podID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" containerID="9ec7c8564a4556dbcf9f7aa440a25c204b9b08c4ad019c1752f20177e725f362" exitCode=143 Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.036819 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e93ef914-ef1e-40a1-a7d9-5866f9a6e454","Type":"ContainerDied","Data":"6eeae45a37103445a6a2f460f1f95dafd6c7ef5bbf1694f76fa12743cf24ee2a"} Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.036956 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e93ef914-ef1e-40a1-a7d9-5866f9a6e454","Type":"ContainerDied","Data":"9ec7c8564a4556dbcf9f7aa440a25c204b9b08c4ad019c1752f20177e725f362"} Sep 29 19:15:31 crc 
kubenswrapper[4792]: I0929 19:15:31.042282 4792 generic.go:334] "Generic (PLEG): container finished" podID="32da7a07-6996-432d-a9d3-121a2f952b4c" containerID="10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023" exitCode=0 Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.042318 4792 generic.go:334] "Generic (PLEG): container finished" podID="32da7a07-6996-432d-a9d3-121a2f952b4c" containerID="cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2" exitCode=143 Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.042339 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"32da7a07-6996-432d-a9d3-121a2f952b4c","Type":"ContainerDied","Data":"10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023"} Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.042361 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"32da7a07-6996-432d-a9d3-121a2f952b4c","Type":"ContainerDied","Data":"cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2"} Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.042426 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"32da7a07-6996-432d-a9d3-121a2f952b4c","Type":"ContainerDied","Data":"d5cb41a1b17efeaa36fc38084d713f967eb3cc0508ade4b1039559054963f57c"} Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.042444 4792 scope.go:117] "RemoveContainer" containerID="10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.042586 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.087771 4792 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.103983 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.130912 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.149908 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:15:31 crc kubenswrapper[4792]: E0929 19:15:31.150314 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32da7a07-6996-432d-a9d3-121a2f952b4c" containerName="glance-log" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.150328 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="32da7a07-6996-432d-a9d3-121a2f952b4c" containerName="glance-log" Sep 29 19:15:31 crc kubenswrapper[4792]: E0929 19:15:31.150356 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32da7a07-6996-432d-a9d3-121a2f952b4c" containerName="glance-httpd" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.150363 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="32da7a07-6996-432d-a9d3-121a2f952b4c" containerName="glance-httpd" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.150539 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="32da7a07-6996-432d-a9d3-121a2f952b4c" containerName="glance-log" Sep 29 19:15:31 crc 
kubenswrapper[4792]: I0929 19:15:31.150555 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="32da7a07-6996-432d-a9d3-121a2f952b4c" containerName="glance-httpd" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.151477 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.156427 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.157069 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.160986 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.291116 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.291197 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-logs\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.291226 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.291246 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.291277 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzqgm\" (UniqueName: \"kubernetes.io/projected/5ca97462-e055-4294-856f-895f0be44759-kube-api-access-bzqgm\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.291297 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-scripts\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.291326 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.291548 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-config-data\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.393518 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.393595 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-logs\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.393639 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.393659 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.393705 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzqgm\" (UniqueName: \"kubernetes.io/projected/5ca97462-e055-4294-856f-895f0be44759-kube-api-access-bzqgm\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.393729 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-scripts\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.393758 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.393808 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-config-data\") pod 
\"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.394337 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.394616 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-logs\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.395165 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.404680 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-scripts\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.405723 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-config-data\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.427554 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.428063 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.429504 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.434810 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzqgm\" (UniqueName: \"kubernetes.io/projected/5ca97462-e055-4294-856f-895f0be44759-kube-api-access-bzqgm\") pod \"glance-default-external-api-0\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " pod="openstack/glance-default-external-api-0" Sep 29 
19:15:31 crc kubenswrapper[4792]: I0929 19:15:31.501797 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.081234 4792 generic.go:334] "Generic (PLEG): container finished" podID="e5b356b4-9ba6-4971-966f-be9d4160b57d" containerID="d72f423cf06a89a52dbdb61789a4ada9f3ad5c2e26307d79817fdcd764d326aa" exitCode=0 Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.081475 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5dhfl" event={"ID":"e5b356b4-9ba6-4971-966f-be9d4160b57d","Type":"ContainerDied","Data":"d72f423cf06a89a52dbdb61789a4ada9f3ad5c2e26307d79817fdcd764d326aa"} Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.569018 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-79f9946749-wjkgr"] Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.595589 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-dfd9c6b56-wq84c"] Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.599489 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.605250 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.616892 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.636684 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dfd9c6b56-wq84c"] Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.697625 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-575755669c-6b5g8"] Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.730277 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-8494dffd6-7rx5p"] Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.739027 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-scripts\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.739413 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-combined-ca-bundle\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.739506 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-secret-key\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.739623 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vctj4\" (UniqueName: \"kubernetes.io/projected/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-kube-api-access-vctj4\") pod \"horizon-dfd9c6b56-wq84c\" 
(UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.739826 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-config-data\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.739937 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-tls-certs\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.740064 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-logs\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.739990 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.761531 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8494dffd6-7rx5p"] Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842044 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q65rt\" (UniqueName: \"kubernetes.io/projected/23845288-b122-49f0-b10d-641cfb94b66f-kube-api-access-q65rt\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842131 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23845288-b122-49f0-b10d-641cfb94b66f-logs\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842156 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23845288-b122-49f0-b10d-641cfb94b66f-combined-ca-bundle\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842211 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-config-data\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842242 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-tls-certs\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 
19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842267 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-logs\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842291 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23845288-b122-49f0-b10d-641cfb94b66f-scripts\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842338 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-scripts\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842369 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/23845288-b122-49f0-b10d-641cfb94b66f-horizon-secret-key\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842399 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/23845288-b122-49f0-b10d-641cfb94b66f-horizon-tls-certs\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842423 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-combined-ca-bundle\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842447 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-secret-key\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842473 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23845288-b122-49f0-b10d-641cfb94b66f-config-data\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.842502 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vctj4\" (UniqueName: \"kubernetes.io/projected/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-kube-api-access-vctj4\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.845331 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-config-data\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.845909 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-scripts\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.847062 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-logs\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.857286 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-secret-key\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.857982 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-combined-ca-bundle\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.858637 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-tls-certs\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.864247 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vctj4\" (UniqueName: \"kubernetes.io/projected/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-kube-api-access-vctj4\") pod \"horizon-dfd9c6b56-wq84c\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.943642 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23845288-b122-49f0-b10d-641cfb94b66f-logs\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.943688 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23845288-b122-49f0-b10d-641cfb94b66f-combined-ca-bundle\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.944153 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23845288-b122-49f0-b10d-641cfb94b66f-logs\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " 
pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.944612 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23845288-b122-49f0-b10d-641cfb94b66f-scripts\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.944667 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/23845288-b122-49f0-b10d-641cfb94b66f-horizon-tls-certs\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.944703 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/23845288-b122-49f0-b10d-641cfb94b66f-horizon-secret-key\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.944726 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23845288-b122-49f0-b10d-641cfb94b66f-config-data\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.944862 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q65rt\" (UniqueName: \"kubernetes.io/projected/23845288-b122-49f0-b10d-641cfb94b66f-kube-api-access-q65rt\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.945199 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23845288-b122-49f0-b10d-641cfb94b66f-scripts\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.946506 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23845288-b122-49f0-b10d-641cfb94b66f-config-data\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.948570 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/23845288-b122-49f0-b10d-641cfb94b66f-horizon-tls-certs\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.949532 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23845288-b122-49f0-b10d-641cfb94b66f-combined-ca-bundle\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.961539 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.963072 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q65rt\" (UniqueName: \"kubernetes.io/projected/23845288-b122-49f0-b10d-641cfb94b66f-kube-api-access-q65rt\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:32 crc kubenswrapper[4792]: I0929 19:15:32.963181 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/23845288-b122-49f0-b10d-641cfb94b66f-horizon-secret-key\") pod \"horizon-8494dffd6-7rx5p\" (UID: \"23845288-b122-49f0-b10d-641cfb94b66f\") " pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:33 crc kubenswrapper[4792]: I0929 19:15:33.058007 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32da7a07-6996-432d-a9d3-121a2f952b4c" path="/var/lib/kubelet/pods/32da7a07-6996-432d-a9d3-121a2f952b4c/volumes" Sep 29 19:15:33 crc kubenswrapper[4792]: I0929 19:15:33.077814 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:15:33 crc kubenswrapper[4792]: I0929 19:15:33.951736 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:15:34 crc kubenswrapper[4792]: I0929 19:15:34.024924 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-mgs5n"] Sep 29 19:15:34 crc kubenswrapper[4792]: I0929 19:15:34.026343 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="dnsmasq-dns" containerID="cri-o://9916413a2cc81dab7505eced696984a99cc43dd3bc6234a6ccf8a181a76593a2" gracePeriod=10 Sep 29 19:15:35 crc kubenswrapper[4792]: I0929 19:15:35.126782 4792 generic.go:334] "Generic (PLEG): container finished" podID="e4b273af-3752-4d48-ae80-4d639b06e836" containerID="9916413a2cc81dab7505eced696984a99cc43dd3bc6234a6ccf8a181a76593a2" exitCode=0 Sep 29 19:15:35 crc kubenswrapper[4792]: I0929 19:15:35.127034 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" event={"ID":"e4b273af-3752-4d48-ae80-4d639b06e836","Type":"ContainerDied","Data":"9916413a2cc81dab7505eced696984a99cc43dd3bc6234a6ccf8a181a76593a2"} Sep 29 19:15:36 crc kubenswrapper[4792]: I0929 19:15:36.905084 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.117:5353: connect: connection refused" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.757517 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.856244 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-credential-keys\") pod \"e5b356b4-9ba6-4971-966f-be9d4160b57d\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.856579 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-scripts\") pod \"e5b356b4-9ba6-4971-966f-be9d4160b57d\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.856671 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-config-data\") pod \"e5b356b4-9ba6-4971-966f-be9d4160b57d\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.856726 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-combined-ca-bundle\") pod \"e5b356b4-9ba6-4971-966f-be9d4160b57d\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.856753 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-fernet-keys\") pod \"e5b356b4-9ba6-4971-966f-be9d4160b57d\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.856804 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnmv2\" (UniqueName: \"kubernetes.io/projected/e5b356b4-9ba6-4971-966f-be9d4160b57d-kube-api-access-vnmv2\") pod \"e5b356b4-9ba6-4971-966f-be9d4160b57d\" (UID: \"e5b356b4-9ba6-4971-966f-be9d4160b57d\") " Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.862698 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5b356b4-9ba6-4971-966f-be9d4160b57d-kube-api-access-vnmv2" (OuterVolumeSpecName: "kube-api-access-vnmv2") pod "e5b356b4-9ba6-4971-966f-be9d4160b57d" (UID: "e5b356b4-9ba6-4971-966f-be9d4160b57d"). InnerVolumeSpecName "kube-api-access-vnmv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.865751 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e5b356b4-9ba6-4971-966f-be9d4160b57d" (UID: "e5b356b4-9ba6-4971-966f-be9d4160b57d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.865879 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e5b356b4-9ba6-4971-966f-be9d4160b57d" (UID: "e5b356b4-9ba6-4971-966f-be9d4160b57d"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.875413 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-scripts" (OuterVolumeSpecName: "scripts") pod "e5b356b4-9ba6-4971-966f-be9d4160b57d" (UID: "e5b356b4-9ba6-4971-966f-be9d4160b57d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.883141 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-config-data" (OuterVolumeSpecName: "config-data") pod "e5b356b4-9ba6-4971-966f-be9d4160b57d" (UID: "e5b356b4-9ba6-4971-966f-be9d4160b57d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.884015 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5b356b4-9ba6-4971-966f-be9d4160b57d" (UID: "e5b356b4-9ba6-4971-966f-be9d4160b57d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.958700 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.958732 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.958744 4792 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.958753 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnmv2\" (UniqueName: \"kubernetes.io/projected/e5b356b4-9ba6-4971-966f-be9d4160b57d-kube-api-access-vnmv2\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.958763 4792 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:37 crc kubenswrapper[4792]: I0929 19:15:37.958771 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5b356b4-9ba6-4971-966f-be9d4160b57d-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.151381 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5dhfl" event={"ID":"e5b356b4-9ba6-4971-966f-be9d4160b57d","Type":"ContainerDied","Data":"5c2ac36592efd87c9b72d533b940bf070811aa295ce51e0dc083be35063aa9ad"} Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.151416 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c2ac36592efd87c9b72d533b940bf070811aa295ce51e0dc083be35063aa9ad" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.151466 4792 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5dhfl" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.860638 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-5dhfl"] Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.916994 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-5dhfl"] Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.942492 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-5vlr7"] Sep 29 19:15:38 crc kubenswrapper[4792]: E0929 19:15:38.942943 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b356b4-9ba6-4971-966f-be9d4160b57d" containerName="keystone-bootstrap" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.942963 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b356b4-9ba6-4971-966f-be9d4160b57d" containerName="keystone-bootstrap" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.943455 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5b356b4-9ba6-4971-966f-be9d4160b57d" containerName="keystone-bootstrap" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.944040 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.947771 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.947969 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-58chc" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.948085 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.977876 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 19:15:38 crc kubenswrapper[4792]: I0929 19:15:38.992229 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5vlr7"] Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.030328 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5b356b4-9ba6-4971-966f-be9d4160b57d" path="/var/lib/kubelet/pods/e5b356b4-9ba6-4971-966f-be9d4160b57d/volumes" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.087473 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-config-data\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.087543 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwkft\" (UniqueName: \"kubernetes.io/projected/1d12f836-d8e3-46a3-bc92-64dae426f114-kube-api-access-jwkft\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.087564 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-scripts\") pod \"keystone-bootstrap-5vlr7\" (UID: 
\"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.087648 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-credential-keys\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.087728 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-fernet-keys\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.087748 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-combined-ca-bundle\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.190529 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-fernet-keys\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.190601 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-combined-ca-bundle\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.191506 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-config-data\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.191551 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwkft\" (UniqueName: \"kubernetes.io/projected/1d12f836-d8e3-46a3-bc92-64dae426f114-kube-api-access-jwkft\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.191575 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-scripts\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.191614 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-credential-keys\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 
29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.196435 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-scripts\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.196668 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-fernet-keys\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.196817 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-combined-ca-bundle\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.197803 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-config-data\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.198488 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-credential-keys\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.214815 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwkft\" (UniqueName: \"kubernetes.io/projected/1d12f836-d8e3-46a3-bc92-64dae426f114-kube-api-access-jwkft\") pod \"keystone-bootstrap-5vlr7\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:39 crc kubenswrapper[4792]: I0929 19:15:39.294925 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.380490 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.517075 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-config-data\") pod \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.517215 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-httpd-run\") pod \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.517260 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-internal-tls-certs\") pod \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.517275 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.517318 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-combined-ca-bundle\") pod \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.517345 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-scripts\") pod \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.517394 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-logs\") pod \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.517432 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwt9g\" (UniqueName: \"kubernetes.io/projected/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-kube-api-access-fwt9g\") pod \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\" (UID: \"e93ef914-ef1e-40a1-a7d9-5866f9a6e454\") " Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.522393 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "e93ef914-ef1e-40a1-a7d9-5866f9a6e454" (UID: "e93ef914-ef1e-40a1-a7d9-5866f9a6e454"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.524581 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-kube-api-access-fwt9g" (OuterVolumeSpecName: "kube-api-access-fwt9g") pod "e93ef914-ef1e-40a1-a7d9-5866f9a6e454" (UID: "e93ef914-ef1e-40a1-a7d9-5866f9a6e454"). InnerVolumeSpecName "kube-api-access-fwt9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.534323 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-logs" (OuterVolumeSpecName: "logs") pod "e93ef914-ef1e-40a1-a7d9-5866f9a6e454" (UID: "e93ef914-ef1e-40a1-a7d9-5866f9a6e454"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.534541 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e93ef914-ef1e-40a1-a7d9-5866f9a6e454" (UID: "e93ef914-ef1e-40a1-a7d9-5866f9a6e454"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.554768 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-scripts" (OuterVolumeSpecName: "scripts") pod "e93ef914-ef1e-40a1-a7d9-5866f9a6e454" (UID: "e93ef914-ef1e-40a1-a7d9-5866f9a6e454"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.587632 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e93ef914-ef1e-40a1-a7d9-5866f9a6e454" (UID: "e93ef914-ef1e-40a1-a7d9-5866f9a6e454"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.599152 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-config-data" (OuterVolumeSpecName: "config-data") pod "e93ef914-ef1e-40a1-a7d9-5866f9a6e454" (UID: "e93ef914-ef1e-40a1-a7d9-5866f9a6e454"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.614256 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e93ef914-ef1e-40a1-a7d9-5866f9a6e454" (UID: "e93ef914-ef1e-40a1-a7d9-5866f9a6e454"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.620309 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.631673 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwt9g\" (UniqueName: \"kubernetes.io/projected/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-kube-api-access-fwt9g\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.631691 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.631704 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.631716 4792 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.631749 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.631763 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.631775 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e93ef914-ef1e-40a1-a7d9-5866f9a6e454-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.655199 4792 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Sep 29 19:15:40 crc kubenswrapper[4792]: I0929 19:15:40.732880 4792 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.201273 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e93ef914-ef1e-40a1-a7d9-5866f9a6e454","Type":"ContainerDied","Data":"2abb559e23953eedcb6cb39c43fe6f72d74b27ac8d87c2592ce9ae4e608a691e"} Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.201600 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.234150 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.247350 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.261194 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:15:41 crc kubenswrapper[4792]: E0929 19:15:41.261708 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" containerName="glance-log" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.261734 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" containerName="glance-log" Sep 29 19:15:41 crc kubenswrapper[4792]: E0929 19:15:41.261755 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" containerName="glance-httpd" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.261764 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" containerName="glance-httpd" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.262015 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" containerName="glance-httpd" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.262038 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" containerName="glance-log" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.264781 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.269524 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.269722 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.281750 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.445471 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.445524 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-logs\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.445554 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.445578 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.445628 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.445682 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndvh2\" (UniqueName: \"kubernetes.io/projected/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-kube-api-access-ndvh2\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.445727 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.445757 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.546938 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.546977 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-logs\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.547003 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.547027 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.547047 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.547093 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndvh2\" (UniqueName: \"kubernetes.io/projected/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-kube-api-access-ndvh2\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.547123 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.547145 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.547552 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-logs\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.547602 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.551455 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.551736 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.553734 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.565150 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.565650 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.568567 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndvh2\" (UniqueName: \"kubernetes.io/projected/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-kube-api-access-ndvh2\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.577361 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:15:41 crc kubenswrapper[4792]: I0929 19:15:41.616373 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:15:43 crc kubenswrapper[4792]: I0929 19:15:43.027888 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e93ef914-ef1e-40a1-a7d9-5866f9a6e454" path="/var/lib/kubelet/pods/e93ef914-ef1e-40a1-a7d9-5866f9a6e454/volumes" Sep 29 19:15:46 crc kubenswrapper[4792]: I0929 19:15:46.904409 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.117:5353: i/o timeout" Sep 29 19:15:47 crc kubenswrapper[4792]: E0929 19:15:47.852160 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 29 19:15:47 crc kubenswrapper[4792]: E0929 19:15:47.852364 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5cch67fhcbhffh68dh686h56bh54dh576h57dhch675hd6hb7h54dh58fh559hf8hf9h65fhf9h594hcdh5d4h5b9h9h68dh5cbh68fhb5h666h77q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gsdpg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-79f9946749-wjkgr_openstack(c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:15:47 crc kubenswrapper[4792]: E0929 19:15:47.853318 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 29 19:15:47 crc kubenswrapper[4792]: E0929 19:15:47.853510 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5hfch55h5c6h566h68h78h658h699h86h79h65bh598h5d5hc8h564h66bh578h65ch656hfhb4h6fh5c5h686h76hc9h5c6h575h594h577h598q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6npsj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-575755669c-6b5g8_openstack(03017320-13cd-4880-80e9-0834cb41a6bd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:15:47 crc kubenswrapper[4792]: E0929 19:15:47.862134 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-79f9946749-wjkgr" podUID="c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef" Sep 29 19:15:47 crc kubenswrapper[4792]: E0929 19:15:47.862875 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-575755669c-6b5g8" podUID="03017320-13cd-4880-80e9-0834cb41a6bd" Sep 29 19:15:47 crc kubenswrapper[4792]: E0929 19:15:47.884283 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 29 19:15:47 crc kubenswrapper[4792]: E0929 19:15:47.884475 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5chbfhb6h569h684h57h5f7h55dh66h58fh75hd8h554hc8h5d9h97h698hd7h5d5h584hdh59bhf4h59ch65h7dh97h85h5cbh5f4h7h546q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h8tnq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7486b4cb6c-fr9s4_openstack(9953fe09-a48c-4c74-83a1-9de5e8cec46d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:15:47 crc kubenswrapper[4792]: E0929 19:15:47.886725 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7486b4cb6c-fr9s4" podUID="9953fe09-a48c-4c74-83a1-9de5e8cec46d" Sep 29 19:15:50 crc kubenswrapper[4792]: I0929 19:15:50.274102 4792 generic.go:334] "Generic (PLEG): container finished" podID="72bc741a-4542-48fb-b65c-c7a12570d80a" containerID="6a5885959acd64b15fe20cfb3870e826808b5a9916e3c522a51fba99a1bcbcf7" exitCode=0 Sep 29 19:15:50 crc kubenswrapper[4792]: I0929 19:15:50.274674 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j6jb8" event={"ID":"72bc741a-4542-48fb-b65c-c7a12570d80a","Type":"ContainerDied","Data":"6a5885959acd64b15fe20cfb3870e826808b5a9916e3c522a51fba99a1bcbcf7"} Sep 29 19:15:51 crc kubenswrapper[4792]: I0929 19:15:51.912660 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.117:5353: i/o timeout" Sep 29 19:15:51 crc kubenswrapper[4792]: I0929 19:15:51.913528 4792 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:15:56 crc kubenswrapper[4792]: I0929 19:15:56.914059 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.117:5353: i/o timeout" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.181067 4792 scope.go:117] "RemoveContainer" containerID="cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.331790 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.390732 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" event={"ID":"e4b273af-3752-4d48-ae80-4d639b06e836","Type":"ContainerDied","Data":"c8dcbfadbd3594ec133d924ba47e910a1a469ec2b09704c4dd3ac676dc803048"} Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.390763 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.500740 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-nb\") pod \"e4b273af-3752-4d48-ae80-4d639b06e836\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.500791 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxz8n\" (UniqueName: \"kubernetes.io/projected/e4b273af-3752-4d48-ae80-4d639b06e836-kube-api-access-lxz8n\") pod \"e4b273af-3752-4d48-ae80-4d639b06e836\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.500876 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-dns-svc\") pod \"e4b273af-3752-4d48-ae80-4d639b06e836\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.500977 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-config\") pod \"e4b273af-3752-4d48-ae80-4d639b06e836\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.501042 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-sb\") pod \"e4b273af-3752-4d48-ae80-4d639b06e836\" (UID: \"e4b273af-3752-4d48-ae80-4d639b06e836\") " Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.522484 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4b273af-3752-4d48-ae80-4d639b06e836-kube-api-access-lxz8n" (OuterVolumeSpecName: "kube-api-access-lxz8n") pod "e4b273af-3752-4d48-ae80-4d639b06e836" (UID: "e4b273af-3752-4d48-ae80-4d639b06e836"). InnerVolumeSpecName "kube-api-access-lxz8n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.548396 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e4b273af-3752-4d48-ae80-4d639b06e836" (UID: "e4b273af-3752-4d48-ae80-4d639b06e836"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.559038 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-config" (OuterVolumeSpecName: "config") pod "e4b273af-3752-4d48-ae80-4d639b06e836" (UID: "e4b273af-3752-4d48-ae80-4d639b06e836"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.560355 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e4b273af-3752-4d48-ae80-4d639b06e836" (UID: "e4b273af-3752-4d48-ae80-4d639b06e836"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.562271 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e4b273af-3752-4d48-ae80-4d639b06e836" (UID: "e4b273af-3752-4d48-ae80-4d639b06e836"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.603827 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.603896 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxz8n\" (UniqueName: \"kubernetes.io/projected/e4b273af-3752-4d48-ae80-4d639b06e836-kube-api-access-lxz8n\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.603912 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.603923 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.603935 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4b273af-3752-4d48-ae80-4d639b06e836-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.725832 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-mgs5n"] Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.731235 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-mgs5n"] Sep 29 19:15:58 crc kubenswrapper[4792]: E0929 19:15:58.820982 4792 log.go:32] "PullImage from image service failed" err="rpc 
error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Sep 29 19:15:58 crc kubenswrapper[4792]: E0929 19:15:58.821136 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5dfh569h649h644h59h56h557h56h67fh656h9h5b7h67dhf8h56fh8fhd5h559h55dh5dfh669h68ch674hf7hf7h88h685h565h589h579hf7h5f8q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n7bw8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(06538688-0bb7-45ae-a249-94ba5c312b2b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:15:58 crc kubenswrapper[4792]: I0929 19:15:58.825423 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.009454 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-combined-ca-bundle\") pod \"72bc741a-4542-48fb-b65c-c7a12570d80a\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.009530 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hldjl\" (UniqueName: \"kubernetes.io/projected/72bc741a-4542-48fb-b65c-c7a12570d80a-kube-api-access-hldjl\") pod \"72bc741a-4542-48fb-b65c-c7a12570d80a\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.009553 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-config\") pod \"72bc741a-4542-48fb-b65c-c7a12570d80a\" (UID: \"72bc741a-4542-48fb-b65c-c7a12570d80a\") " Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.012602 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72bc741a-4542-48fb-b65c-c7a12570d80a-kube-api-access-hldjl" (OuterVolumeSpecName: "kube-api-access-hldjl") pod "72bc741a-4542-48fb-b65c-c7a12570d80a" (UID: "72bc741a-4542-48fb-b65c-c7a12570d80a"). InnerVolumeSpecName "kube-api-access-hldjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.031083 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" path="/var/lib/kubelet/pods/e4b273af-3752-4d48-ae80-4d639b06e836/volumes" Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.036560 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72bc741a-4542-48fb-b65c-c7a12570d80a" (UID: "72bc741a-4542-48fb-b65c-c7a12570d80a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.037819 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-config" (OuterVolumeSpecName: "config") pod "72bc741a-4542-48fb-b65c-c7a12570d80a" (UID: "72bc741a-4542-48fb-b65c-c7a12570d80a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.111670 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.111981 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hldjl\" (UniqueName: \"kubernetes.io/projected/72bc741a-4542-48fb-b65c-c7a12570d80a-kube-api-access-hldjl\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.112000 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/72bc741a-4542-48fb-b65c-c7a12570d80a-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.403897 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j6jb8" event={"ID":"72bc741a-4542-48fb-b65c-c7a12570d80a","Type":"ContainerDied","Data":"feb60b91ffb62bd44316b44c35cb567325abf31ea93a2e7b09c4c11b43639e30"} Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.403952 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-j6jb8" Sep 29 19:15:59 crc kubenswrapper[4792]: I0929 19:15:59.403972 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="feb60b91ffb62bd44316b44c35cb567325abf31ea93a2e7b09c4c11b43639e30" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.103260 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-6qkxx"] Sep 29 19:16:00 crc kubenswrapper[4792]: E0929 19:16:00.103599 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72bc741a-4542-48fb-b65c-c7a12570d80a" containerName="neutron-db-sync" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.103610 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="72bc741a-4542-48fb-b65c-c7a12570d80a" containerName="neutron-db-sync" Sep 29 19:16:00 crc kubenswrapper[4792]: E0929 19:16:00.103624 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="init" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.103629 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="init" Sep 29 19:16:00 crc kubenswrapper[4792]: E0929 19:16:00.103643 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="dnsmasq-dns" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.103650 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="dnsmasq-dns" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.103808 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="72bc741a-4542-48fb-b65c-c7a12570d80a" containerName="neutron-db-sync" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.103830 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="dnsmasq-dns" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.105203 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.177831 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-6qkxx"] Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.234947 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-76468fd5f8-gfqwb"] Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.236193 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dfz6\" (UniqueName: \"kubernetes.io/projected/b7b5b809-50d5-467e-9faf-3d4398f81b15-kube-api-access-2dfz6\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.236260 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.236306 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-config\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.236335 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-svc\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.236369 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.236399 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.240233 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.244789 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.245141 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.245294 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-jk6r4" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.245530 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.265637 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-76468fd5f8-gfqwb"] Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.337566 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.337651 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-config\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.337685 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-svc\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.338898 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-svc\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.338967 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.339004 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.339098 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dfz6\" (UniqueName: \"kubernetes.io/projected/b7b5b809-50d5-467e-9faf-3d4398f81b15-kube-api-access-2dfz6\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " 
pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.340111 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.340370 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.340805 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.341071 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-config\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.365664 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dfz6\" (UniqueName: \"kubernetes.io/projected/b7b5b809-50d5-467e-9faf-3d4398f81b15-kube-api-access-2dfz6\") pod \"dnsmasq-dns-6b7b667979-6qkxx\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.420822 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.440102 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-combined-ca-bundle\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.440144 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-httpd-config\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.440231 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-ovndb-tls-certs\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.440269 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-config\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.440298 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gpzq\" (UniqueName: \"kubernetes.io/projected/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-kube-api-access-2gpzq\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.541471 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-combined-ca-bundle\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.541529 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-httpd-config\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.541620 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-ovndb-tls-certs\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.541643 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-config\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " 
pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.541670 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gpzq\" (UniqueName: \"kubernetes.io/projected/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-kube-api-access-2gpzq\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.546826 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-config\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.547695 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-ovndb-tls-certs\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.548068 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-httpd-config\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.554688 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-combined-ca-bundle\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.563960 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gpzq\" (UniqueName: \"kubernetes.io/projected/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-kube-api-access-2gpzq\") pod \"neutron-76468fd5f8-gfqwb\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.570182 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:00 crc kubenswrapper[4792]: E0929 19:16:00.621454 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 29 19:16:00 crc kubenswrapper[4792]: E0929 19:16:00.621710 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hcl6j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-cvlgm_openstack(bd0405ab-8be9-41cd-aa4d-7cbe44be3049): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:16:00 crc kubenswrapper[4792]: E0929 19:16:00.623929 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-cvlgm" podUID="bd0405ab-8be9-41cd-aa4d-7cbe44be3049" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.712510 4792 scope.go:117] "RemoveContainer" 
containerID="10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023" Sep 29 19:16:00 crc kubenswrapper[4792]: E0929 19:16:00.713032 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023\": container with ID starting with 10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023 not found: ID does not exist" containerID="10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.713060 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023"} err="failed to get container status \"10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023\": rpc error: code = NotFound desc = could not find container \"10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023\": container with ID starting with 10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023 not found: ID does not exist" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.713081 4792 scope.go:117] "RemoveContainer" containerID="cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2" Sep 29 19:16:00 crc kubenswrapper[4792]: E0929 19:16:00.713383 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2\": container with ID starting with cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2 not found: ID does not exist" containerID="cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.713404 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2"} err="failed to get container status \"cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2\": rpc error: code = NotFound desc = could not find container \"cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2\": container with ID starting with cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2 not found: ID does not exist" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.713418 4792 scope.go:117] "RemoveContainer" containerID="10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.714311 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023"} err="failed to get container status \"10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023\": rpc error: code = NotFound desc = could not find container \"10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023\": container with ID starting with 10a14829912d5ce0bcd8a5cb79adbab5edda76528016781edb2c4edd9bcbb023 not found: ID does not exist" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.714339 4792 scope.go:117] "RemoveContainer" containerID="cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.715020 4792 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2"} err="failed to get container status \"cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2\": rpc error: code = NotFound desc = could not find container \"cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2\": container with ID starting with cc20a227c88234a2e2f6da7a8eecb6bc5259ad27eb3b3bd89926559b7daeb6c2 not found: ID does not exist" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.715041 4792 scope.go:117] "RemoveContainer" containerID="6eeae45a37103445a6a2f460f1f95dafd6c7ef5bbf1694f76fa12743cf24ee2a" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.809212 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.833171 4792 scope.go:117] "RemoveContainer" containerID="9ec7c8564a4556dbcf9f7aa440a25c204b9b08c4ad019c1752f20177e725f362" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.852766 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-scripts\") pod \"03017320-13cd-4880-80e9-0834cb41a6bd\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.852843 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03017320-13cd-4880-80e9-0834cb41a6bd-logs\") pod \"03017320-13cd-4880-80e9-0834cb41a6bd\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.852885 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-config-data\") pod \"03017320-13cd-4880-80e9-0834cb41a6bd\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.852950 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/03017320-13cd-4880-80e9-0834cb41a6bd-horizon-secret-key\") pod \"03017320-13cd-4880-80e9-0834cb41a6bd\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.853485 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03017320-13cd-4880-80e9-0834cb41a6bd-logs" (OuterVolumeSpecName: "logs") pod "03017320-13cd-4880-80e9-0834cb41a6bd" (UID: "03017320-13cd-4880-80e9-0834cb41a6bd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.854126 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-scripts" (OuterVolumeSpecName: "scripts") pod "03017320-13cd-4880-80e9-0834cb41a6bd" (UID: "03017320-13cd-4880-80e9-0834cb41a6bd"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.870116 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6npsj\" (UniqueName: \"kubernetes.io/projected/03017320-13cd-4880-80e9-0834cb41a6bd-kube-api-access-6npsj\") pod \"03017320-13cd-4880-80e9-0834cb41a6bd\" (UID: \"03017320-13cd-4880-80e9-0834cb41a6bd\") " Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.871685 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.871702 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03017320-13cd-4880-80e9-0834cb41a6bd-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.870257 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-config-data" (OuterVolumeSpecName: "config-data") pod "03017320-13cd-4880-80e9-0834cb41a6bd" (UID: "03017320-13cd-4880-80e9-0834cb41a6bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.893707 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03017320-13cd-4880-80e9-0834cb41a6bd-kube-api-access-6npsj" (OuterVolumeSpecName: "kube-api-access-6npsj") pod "03017320-13cd-4880-80e9-0834cb41a6bd" (UID: "03017320-13cd-4880-80e9-0834cb41a6bd"). InnerVolumeSpecName "kube-api-access-6npsj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.922939 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03017320-13cd-4880-80e9-0834cb41a6bd-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "03017320-13cd-4880-80e9-0834cb41a6bd" (UID: "03017320-13cd-4880-80e9-0834cb41a6bd"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.989906 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03017320-13cd-4880-80e9-0834cb41a6bd-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.990344 4792 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/03017320-13cd-4880-80e9-0834cb41a6bd-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:00 crc kubenswrapper[4792]: I0929 19:16:00.990419 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6npsj\" (UniqueName: \"kubernetes.io/projected/03017320-13cd-4880-80e9-0834cb41a6bd-kube-api-access-6npsj\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:00.999054 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.019145 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.085689 4792 scope.go:117] "RemoveContainer" containerID="9916413a2cc81dab7505eced696984a99cc43dd3bc6234a6ccf8a181a76593a2" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.091379 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-config-data\") pod \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.091458 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9953fe09-a48c-4c74-83a1-9de5e8cec46d-logs\") pod \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.091501 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-horizon-secret-key\") pod \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.091561 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-logs\") pod \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.091612 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-scripts\") pod \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.091649 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsdpg\" (UniqueName: \"kubernetes.io/projected/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-kube-api-access-gsdpg\") pod \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.091671 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9953fe09-a48c-4c74-83a1-9de5e8cec46d-horizon-secret-key\") pod \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.091698 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-config-data\") pod \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.091738 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-scripts\") pod \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\" (UID: \"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef\") " Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.091803 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8tnq\" 
(UniqueName: \"kubernetes.io/projected/9953fe09-a48c-4c74-83a1-9de5e8cec46d-kube-api-access-h8tnq\") pod \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\" (UID: \"9953fe09-a48c-4c74-83a1-9de5e8cec46d\") " Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.092102 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9953fe09-a48c-4c74-83a1-9de5e8cec46d-logs" (OuterVolumeSpecName: "logs") pod "9953fe09-a48c-4c74-83a1-9de5e8cec46d" (UID: "9953fe09-a48c-4c74-83a1-9de5e8cec46d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.092282 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9953fe09-a48c-4c74-83a1-9de5e8cec46d-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.093076 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-config-data" (OuterVolumeSpecName: "config-data") pod "c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef" (UID: "c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.093911 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-logs" (OuterVolumeSpecName: "logs") pod "c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef" (UID: "c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.094373 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-scripts" (OuterVolumeSpecName: "scripts") pod "9953fe09-a48c-4c74-83a1-9de5e8cec46d" (UID: "9953fe09-a48c-4c74-83a1-9de5e8cec46d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.096088 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-scripts" (OuterVolumeSpecName: "scripts") pod "c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef" (UID: "c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.099412 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-config-data" (OuterVolumeSpecName: "config-data") pod "9953fe09-a48c-4c74-83a1-9de5e8cec46d" (UID: "9953fe09-a48c-4c74-83a1-9de5e8cec46d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.106436 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9953fe09-a48c-4c74-83a1-9de5e8cec46d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9953fe09-a48c-4c74-83a1-9de5e8cec46d" (UID: "9953fe09-a48c-4c74-83a1-9de5e8cec46d"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.110267 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9953fe09-a48c-4c74-83a1-9de5e8cec46d-kube-api-access-h8tnq" (OuterVolumeSpecName: "kube-api-access-h8tnq") pod "9953fe09-a48c-4c74-83a1-9de5e8cec46d" (UID: "9953fe09-a48c-4c74-83a1-9de5e8cec46d"). InnerVolumeSpecName "kube-api-access-h8tnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.113988 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-kube-api-access-gsdpg" (OuterVolumeSpecName: "kube-api-access-gsdpg") pod "c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef" (UID: "c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef"). InnerVolumeSpecName "kube-api-access-gsdpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.115536 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef" (UID: "c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.202256 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.202286 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsdpg\" (UniqueName: \"kubernetes.io/projected/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-kube-api-access-gsdpg\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.202299 4792 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9953fe09-a48c-4c74-83a1-9de5e8cec46d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.202308 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.202321 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.202329 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8tnq\" (UniqueName: \"kubernetes.io/projected/9953fe09-a48c-4c74-83a1-9de5e8cec46d-kube-api-access-h8tnq\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.202336 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9953fe09-a48c-4c74-83a1-9de5e8cec46d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.202347 4792 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-horizon-secret-key\") on node \"crc\" 
DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.202354 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.210385 4792 scope.go:117] "RemoveContainer" containerID="0ca9398d6a414aca77df34ba76c145ec1bb7471232b87ae9e57d36750f3b4bb1" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.449369 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8494dffd6-7rx5p"] Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.500901 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7486b4cb6c-fr9s4" event={"ID":"9953fe09-a48c-4c74-83a1-9de5e8cec46d","Type":"ContainerDied","Data":"03d246cacb1bd0e828f9a741d435516977747e91531292610332fb7ca838f15e"} Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.501024 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7486b4cb6c-fr9s4" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.527064 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-575755669c-6b5g8" event={"ID":"03017320-13cd-4880-80e9-0834cb41a6bd","Type":"ContainerDied","Data":"a0e637e7475014491d3582c41be596b098448686a624731f816a04e2565cefdd"} Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.527168 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-575755669c-6b5g8" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.536767 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-79f9946749-wjkgr" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.539149 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79f9946749-wjkgr" event={"ID":"c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef","Type":"ContainerDied","Data":"facaa116af583eb6a28415b5f8bf5fc447fe664a35eedb0491a6effbed125dce"} Sep 29 19:16:01 crc kubenswrapper[4792]: E0929 19:16:01.542367 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-cvlgm" podUID="bd0405ab-8be9-41cd-aa4d-7cbe44be3049" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.645388 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.657223 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-575755669c-6b5g8"] Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.670737 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-575755669c-6b5g8"] Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.703817 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7486b4cb6c-fr9s4"] Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.711382 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7486b4cb6c-fr9s4"] Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.731146 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-79f9946749-wjkgr"] Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.738782 4792 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/horizon-79f9946749-wjkgr"] Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.833081 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dfd9c6b56-wq84c"] Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.916735 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-mgs5n" podUID="e4b273af-3752-4d48-ae80-4d639b06e836" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.117:5353: i/o timeout" Sep 29 19:16:01 crc kubenswrapper[4792]: I0929 19:16:01.964271 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.001933 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-6qkxx"] Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.012590 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5vlr7"] Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.142361 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-76468fd5f8-gfqwb"] Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.554041 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76468fd5f8-gfqwb" event={"ID":"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce","Type":"ContainerStarted","Data":"175d124e38feb4de4f16828fc99f2cf7e86deb78620e9e79adee38b902a28506"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.554357 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76468fd5f8-gfqwb" event={"ID":"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce","Type":"ContainerStarted","Data":"d03ef07057c448c6e75d2a16cb40d7a5a1473151775904da4ef7a87d77fe1e56"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.557269 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5vlr7" event={"ID":"1d12f836-d8e3-46a3-bc92-64dae426f114","Type":"ContainerStarted","Data":"458f42dbafa406e8067bc4be0137d6c9a4b875c15a1507d9300b9941e91ba73f"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.557303 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5vlr7" event={"ID":"1d12f836-d8e3-46a3-bc92-64dae426f114","Type":"ContainerStarted","Data":"0efb192d35dd984724a43c372b2c60835d8bc129073df8629d347e3e18914f79"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.559879 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5ca97462-e055-4294-856f-895f0be44759","Type":"ContainerStarted","Data":"2555f855ff78c563addd07979b9c2662b152cc0f852f540813c6cd5f7a3efd1b"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.562920 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd9c6b56-wq84c" event={"ID":"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc","Type":"ContainerStarted","Data":"44e56ba45bc8ca2ce2011cd32946ce2f00f01164740cc7be4438b0ed89860256"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.582455 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-5vlr7" podStartSLOduration=24.582436434999998 podStartE2EDuration="24.582436435s" podCreationTimestamp="2025-09-29 19:15:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:02.575840329 +0000 UTC m=+1174.569147745" 
watchObservedRunningTime="2025-09-29 19:16:02.582436435 +0000 UTC m=+1174.575743831" Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.605209 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5bsn5" event={"ID":"3c3fc253-fb19-4845-a099-4754b7a55cdb","Type":"ContainerStarted","Data":"3feeb60329406e8936b0b75192cd78cb94d59aca2dca3a89c2f5d7219e9eeb9a"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.615415 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a","Type":"ContainerStarted","Data":"7deee0a366d3a3c058795f4201190c88ab6049a482b90a77ca98f34fce04500e"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.625651 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-qwkmr" event={"ID":"b45ae86a-64ce-47be-a568-021cf9da5107","Type":"ContainerStarted","Data":"669c7cc1f802e3c741ab812ebea14d9a705e2cf6d0558e155f535ddf3ec6e8fd"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.632763 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8494dffd6-7rx5p" event={"ID":"23845288-b122-49f0-b10d-641cfb94b66f","Type":"ContainerStarted","Data":"cdcddef98e6755828ad58d48102f5c4b428931ea7dd332397066865733bf0229"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.638587 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" event={"ID":"b7b5b809-50d5-467e-9faf-3d4398f81b15","Type":"ContainerStarted","Data":"1d4ce2e04b7c26d2f14bfb0a28b44cb95a1d7306c0954008ee37f871280590d3"} Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.646655 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-5bsn5" podStartSLOduration=4.18073015 podStartE2EDuration="39.646637187s" podCreationTimestamp="2025-09-29 19:15:23 +0000 UTC" firstStartedPulling="2025-09-29 19:15:25.098392976 +0000 UTC m=+1137.091700372" lastFinishedPulling="2025-09-29 19:16:00.564300013 +0000 UTC m=+1172.557607409" observedRunningTime="2025-09-29 19:16:02.642137087 +0000 UTC m=+1174.635444483" watchObservedRunningTime="2025-09-29 19:16:02.646637187 +0000 UTC m=+1174.639944583" Sep 29 19:16:02 crc kubenswrapper[4792]: I0929 19:16:02.710656 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-qwkmr" podStartSLOduration=4.101661882 podStartE2EDuration="39.710600853s" podCreationTimestamp="2025-09-29 19:15:23 +0000 UTC" firstStartedPulling="2025-09-29 19:15:24.995363229 +0000 UTC m=+1136.988670625" lastFinishedPulling="2025-09-29 19:16:00.6043022 +0000 UTC m=+1172.597609596" observedRunningTime="2025-09-29 19:16:02.695977323 +0000 UTC m=+1174.689284739" watchObservedRunningTime="2025-09-29 19:16:02.710600853 +0000 UTC m=+1174.703908249" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.034095 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03017320-13cd-4880-80e9-0834cb41a6bd" path="/var/lib/kubelet/pods/03017320-13cd-4880-80e9-0834cb41a6bd/volumes" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.034928 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9953fe09-a48c-4c74-83a1-9de5e8cec46d" path="/var/lib/kubelet/pods/9953fe09-a48c-4c74-83a1-9de5e8cec46d/volumes" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.035435 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef" path="/var/lib/kubelet/pods/c1df6cb2-f030-42cc-8e73-e1cbfd6d55ef/volumes" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.687370 4792 generic.go:334] "Generic (PLEG): container finished" podID="b7b5b809-50d5-467e-9faf-3d4398f81b15" containerID="7c4b3d78fa3901e2476b72e1db0c88d929066f3ea05ce3ffb80a100285a57150" exitCode=0 Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.687744 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" event={"ID":"b7b5b809-50d5-467e-9faf-3d4398f81b15","Type":"ContainerDied","Data":"7c4b3d78fa3901e2476b72e1db0c88d929066f3ea05ce3ffb80a100285a57150"} Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.694045 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a","Type":"ContainerStarted","Data":"f9c1c5f9bbf627d9bfed230d14685cf3c86bce16c947782b80e9d155d4580e61"} Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.725112 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5ca97462-e055-4294-856f-895f0be44759","Type":"ContainerStarted","Data":"f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69"} Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.817577 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-554fb67fd9-fh25j"] Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.819325 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.834325 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.834259 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.878983 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-ovndb-tls-certs\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.879599 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-httpd-config\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.879729 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-public-tls-certs\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.879820 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-config\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " 
pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.879902 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-internal-tls-certs\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.880019 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjgzk\" (UniqueName: \"kubernetes.io/projected/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-kube-api-access-wjgzk\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.880102 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-combined-ca-bundle\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.882468 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-554fb67fd9-fh25j"] Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.981454 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjgzk\" (UniqueName: \"kubernetes.io/projected/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-kube-api-access-wjgzk\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.981495 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-combined-ca-bundle\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.981535 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-ovndb-tls-certs\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.981572 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-httpd-config\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.981625 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-public-tls-certs\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.981651 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-config\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:03 crc kubenswrapper[4792]: I0929 19:16:03.981665 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-internal-tls-certs\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.043931 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-internal-tls-certs\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.044223 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-config\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.044389 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-httpd-config\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.045650 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-combined-ca-bundle\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.046807 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-public-tls-certs\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.050421 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-ovndb-tls-certs\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.051669 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjgzk\" (UniqueName: \"kubernetes.io/projected/015e1c15-2d65-42f6-8883-b0be2b5dc0ef-kube-api-access-wjgzk\") pod \"neutron-554fb67fd9-fh25j\" (UID: \"015e1c15-2d65-42f6-8883-b0be2b5dc0ef\") " pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.149818 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.737082 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8494dffd6-7rx5p" event={"ID":"23845288-b122-49f0-b10d-641cfb94b66f","Type":"ContainerStarted","Data":"ba4fad29daf04b86413b14eac8b4af6c2fc96439c9e2ae8ddad84953ed1a5fd6"} Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.739378 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" event={"ID":"b7b5b809-50d5-467e-9faf-3d4398f81b15","Type":"ContainerStarted","Data":"35b9217f49e1a490d6c94be56e7b3250f2a452ab9417107400a369fa0c6e4228"} Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.740549 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.746295 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76468fd5f8-gfqwb" event={"ID":"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce","Type":"ContainerStarted","Data":"191e64ed10d4f9889b7edc026e4179cae69c2f165fe2028869de4a6c85e655f8"} Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.746963 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.748292 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"06538688-0bb7-45ae-a249-94ba5c312b2b","Type":"ContainerStarted","Data":"d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd"} Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.750614 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd9c6b56-wq84c" event={"ID":"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc","Type":"ContainerStarted","Data":"30df7ac6c56065d05590b40fa8b60c11ef56e2f8dbc338a1dc4730d3f00fb6e0"} Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.750646 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd9c6b56-wq84c" event={"ID":"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc","Type":"ContainerStarted","Data":"44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046"} Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.762551 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" podStartSLOduration=4.762535037 podStartE2EDuration="4.762535037s" podCreationTimestamp="2025-09-29 19:16:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:04.758172811 +0000 UTC m=+1176.751480227" watchObservedRunningTime="2025-09-29 19:16:04.762535037 +0000 UTC m=+1176.755842423" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.785057 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-dfd9c6b56-wq84c" podStartSLOduration=31.006339341 podStartE2EDuration="32.785037117s" podCreationTimestamp="2025-09-29 19:15:32 +0000 UTC" firstStartedPulling="2025-09-29 19:16:01.843234392 +0000 UTC m=+1173.836541788" lastFinishedPulling="2025-09-29 19:16:03.621932168 +0000 UTC m=+1175.615239564" observedRunningTime="2025-09-29 19:16:04.779196042 +0000 UTC m=+1176.772503448" watchObservedRunningTime="2025-09-29 19:16:04.785037117 +0000 UTC m=+1176.778344513" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.801467 4792 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-76468fd5f8-gfqwb" podStartSLOduration=4.801451415 podStartE2EDuration="4.801451415s" podCreationTimestamp="2025-09-29 19:16:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:04.797774327 +0000 UTC m=+1176.791081713" watchObservedRunningTime="2025-09-29 19:16:04.801451415 +0000 UTC m=+1176.794758811" Sep 29 19:16:04 crc kubenswrapper[4792]: I0929 19:16:04.913022 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-554fb67fd9-fh25j"] Sep 29 19:16:04 crc kubenswrapper[4792]: W0929 19:16:04.935433 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod015e1c15_2d65_42f6_8883_b0be2b5dc0ef.slice/crio-a41d47c023bbd2fb1d1b5591e54ec437d8c7754aba98c47f6a0fad2a2f90fd7d WatchSource:0}: Error finding container a41d47c023bbd2fb1d1b5591e54ec437d8c7754aba98c47f6a0fad2a2f90fd7d: Status 404 returned error can't find the container with id a41d47c023bbd2fb1d1b5591e54ec437d8c7754aba98c47f6a0fad2a2f90fd7d Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.777022 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a","Type":"ContainerStarted","Data":"c538a8a06bd4d6b77b452474e62bd5618a1cf99a8a20eb83f58d03a2a24f4cdf"} Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.787062 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5ca97462-e055-4294-856f-895f0be44759","Type":"ContainerStarted","Data":"2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212"} Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.787409 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5ca97462-e055-4294-856f-895f0be44759" containerName="glance-log" containerID="cri-o://f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69" gracePeriod=30 Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.787740 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5ca97462-e055-4294-856f-895f0be44759" containerName="glance-httpd" containerID="cri-o://2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212" gracePeriod=30 Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.794226 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-554fb67fd9-fh25j" event={"ID":"015e1c15-2d65-42f6-8883-b0be2b5dc0ef","Type":"ContainerStarted","Data":"4debe432602fa41d9533fd4ec403e0e09b22bcfe3fdba1008f09364fce629e9d"} Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.794415 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-554fb67fd9-fh25j" event={"ID":"015e1c15-2d65-42f6-8883-b0be2b5dc0ef","Type":"ContainerStarted","Data":"f7e905d59d30e7431d9b666604447b2cc5676836f480f4aa1cb93bccf6478653"} Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.794534 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-554fb67fd9-fh25j" event={"ID":"015e1c15-2d65-42f6-8883-b0be2b5dc0ef","Type":"ContainerStarted","Data":"a41d47c023bbd2fb1d1b5591e54ec437d8c7754aba98c47f6a0fad2a2f90fd7d"} Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.795312 4792 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.798983 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8494dffd6-7rx5p" event={"ID":"23845288-b122-49f0-b10d-641cfb94b66f","Type":"ContainerStarted","Data":"e157e75d292130bd4389d439006d4ea52a41ceebecf0771101d06500e2e20e69"} Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.863765 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=24.863743595 podStartE2EDuration="24.863743595s" podCreationTimestamp="2025-09-29 19:15:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:05.821752405 +0000 UTC m=+1177.815059961" watchObservedRunningTime="2025-09-29 19:16:05.863743595 +0000 UTC m=+1177.857050991" Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.897492 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-554fb67fd9-fh25j" podStartSLOduration=2.897470395 podStartE2EDuration="2.897470395s" podCreationTimestamp="2025-09-29 19:16:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:05.86955449 +0000 UTC m=+1177.862861886" watchObservedRunningTime="2025-09-29 19:16:05.897470395 +0000 UTC m=+1177.890777791" Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.915140 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-8494dffd6-7rx5p" podStartSLOduration=32.862362869 podStartE2EDuration="33.915122565s" podCreationTimestamp="2025-09-29 19:15:32 +0000 UTC" firstStartedPulling="2025-09-29 19:16:01.536221954 +0000 UTC m=+1173.529529350" lastFinishedPulling="2025-09-29 19:16:02.58898165 +0000 UTC m=+1174.582289046" observedRunningTime="2025-09-29 19:16:05.888648139 +0000 UTC m=+1177.881955535" watchObservedRunningTime="2025-09-29 19:16:05.915122565 +0000 UTC m=+1177.908429961" Sep 29 19:16:05 crc kubenswrapper[4792]: I0929 19:16:05.920551 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=34.92052904 podStartE2EDuration="34.92052904s" podCreationTimestamp="2025-09-29 19:15:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:05.913371449 +0000 UTC m=+1177.906678845" watchObservedRunningTime="2025-09-29 19:16:05.92052904 +0000 UTC m=+1177.913836436" Sep 29 19:16:06 crc kubenswrapper[4792]: E0929 19:16:06.124203 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ca97462_e055_4294_856f_895f0be44759.slice/crio-2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ca97462_e055_4294_856f_895f0be44759.slice/crio-f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69.scope\": RecentStats: unable to find data in memory cache]" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.503182 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.661659 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-public-tls-certs\") pod \"5ca97462-e055-4294-856f-895f0be44759\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.661793 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzqgm\" (UniqueName: \"kubernetes.io/projected/5ca97462-e055-4294-856f-895f0be44759-kube-api-access-bzqgm\") pod \"5ca97462-e055-4294-856f-895f0be44759\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.661829 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-logs\") pod \"5ca97462-e055-4294-856f-895f0be44759\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.661868 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-httpd-run\") pod \"5ca97462-e055-4294-856f-895f0be44759\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.661887 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"5ca97462-e055-4294-856f-895f0be44759\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.661915 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-config-data\") pod \"5ca97462-e055-4294-856f-895f0be44759\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.661958 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-scripts\") pod \"5ca97462-e055-4294-856f-895f0be44759\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.661989 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-combined-ca-bundle\") pod \"5ca97462-e055-4294-856f-895f0be44759\" (UID: \"5ca97462-e055-4294-856f-895f0be44759\") " Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.662553 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5ca97462-e055-4294-856f-895f0be44759" (UID: "5ca97462-e055-4294-856f-895f0be44759"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.662708 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-logs" (OuterVolumeSpecName: "logs") pod "5ca97462-e055-4294-856f-895f0be44759" (UID: "5ca97462-e055-4294-856f-895f0be44759"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.663189 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.663210 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ca97462-e055-4294-856f-895f0be44759-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.691037 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ca97462-e055-4294-856f-895f0be44759-kube-api-access-bzqgm" (OuterVolumeSpecName: "kube-api-access-bzqgm") pod "5ca97462-e055-4294-856f-895f0be44759" (UID: "5ca97462-e055-4294-856f-895f0be44759"). InnerVolumeSpecName "kube-api-access-bzqgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.697284 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "5ca97462-e055-4294-856f-895f0be44759" (UID: "5ca97462-e055-4294-856f-895f0be44759"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.699564 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-scripts" (OuterVolumeSpecName: "scripts") pod "5ca97462-e055-4294-856f-895f0be44759" (UID: "5ca97462-e055-4294-856f-895f0be44759"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.714669 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ca97462-e055-4294-856f-895f0be44759" (UID: "5ca97462-e055-4294-856f-895f0be44759"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.747554 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-config-data" (OuterVolumeSpecName: "config-data") pod "5ca97462-e055-4294-856f-895f0be44759" (UID: "5ca97462-e055-4294-856f-895f0be44759"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.765703 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzqgm\" (UniqueName: \"kubernetes.io/projected/5ca97462-e055-4294-856f-895f0be44759-kube-api-access-bzqgm\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.765752 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.765763 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.765771 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.765780 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.789922 4792 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.806789 4792 generic.go:334] "Generic (PLEG): container finished" podID="5ca97462-e055-4294-856f-895f0be44759" containerID="2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212" exitCode=0 Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.806822 4792 generic.go:334] "Generic (PLEG): container finished" podID="5ca97462-e055-4294-856f-895f0be44759" containerID="f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69" exitCode=143 Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.808035 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.808429 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5ca97462-e055-4294-856f-895f0be44759","Type":"ContainerDied","Data":"2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212"} Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.808456 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5ca97462-e055-4294-856f-895f0be44759","Type":"ContainerDied","Data":"f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69"} Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.808468 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5ca97462-e055-4294-856f-895f0be44759","Type":"ContainerDied","Data":"2555f855ff78c563addd07979b9c2662b152cc0f852f540813c6cd5f7a3efd1b"} Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.808481 4792 scope.go:117] "RemoveContainer" containerID="2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.820752 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5ca97462-e055-4294-856f-895f0be44759" (UID: "5ca97462-e055-4294-856f-895f0be44759"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.867344 4792 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ca97462-e055-4294-856f-895f0be44759-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:06 crc kubenswrapper[4792]: I0929 19:16:06.867377 4792 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.128400 4792 scope.go:117] "RemoveContainer" containerID="f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.132726 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.141004 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.164421 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:16:07 crc kubenswrapper[4792]: E0929 19:16:07.164775 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ca97462-e055-4294-856f-895f0be44759" containerName="glance-log" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.164787 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ca97462-e055-4294-856f-895f0be44759" containerName="glance-log" Sep 29 19:16:07 crc kubenswrapper[4792]: E0929 19:16:07.164803 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ca97462-e055-4294-856f-895f0be44759" containerName="glance-httpd" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.164809 4792 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="5ca97462-e055-4294-856f-895f0be44759" containerName="glance-httpd" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.164989 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ca97462-e055-4294-856f-895f0be44759" containerName="glance-httpd" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.165017 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ca97462-e055-4294-856f-895f0be44759" containerName="glance-log" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.165845 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.175316 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.175362 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.187888 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.191365 4792 scope.go:117] "RemoveContainer" containerID="2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212" Sep 29 19:16:07 crc kubenswrapper[4792]: E0929 19:16:07.192109 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212\": container with ID starting with 2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212 not found: ID does not exist" containerID="2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.192144 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212"} err="failed to get container status \"2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212\": rpc error: code = NotFound desc = could not find container \"2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212\": container with ID starting with 2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212 not found: ID does not exist" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.192170 4792 scope.go:117] "RemoveContainer" containerID="f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69" Sep 29 19:16:07 crc kubenswrapper[4792]: E0929 19:16:07.201320 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69\": container with ID starting with f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69 not found: ID does not exist" containerID="f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.201366 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69"} err="failed to get container status \"f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69\": rpc error: code = NotFound desc = could not find container \"f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69\": container with ID starting 
with f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69 not found: ID does not exist" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.201393 4792 scope.go:117] "RemoveContainer" containerID="2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.231868 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212"} err="failed to get container status \"2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212\": rpc error: code = NotFound desc = could not find container \"2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212\": container with ID starting with 2ca2d4853a459f920e23211a598e56ef2cf0fdcba78477befa00e74e9b2b2212 not found: ID does not exist" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.232149 4792 scope.go:117] "RemoveContainer" containerID="f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.232716 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69"} err="failed to get container status \"f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69\": rpc error: code = NotFound desc = could not find container \"f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69\": container with ID starting with f57851132b296920e829d7a4823ab83290b8c0d33e7fd79268fdd79403bb4d69 not found: ID does not exist" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.280539 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.280591 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.280622 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.280669 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.280763 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-scripts\") pod \"glance-default-external-api-0\" 
(UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.284111 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-logs\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.284400 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnhzc\" (UniqueName: \"kubernetes.io/projected/ffd2c1db-2c19-492f-8783-f03f235013da-kube-api-access-cnhzc\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.284772 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-config-data\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.386328 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-config-data\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.386414 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.386455 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.386476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.386522 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.386538 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-scripts\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") 
" pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.386564 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-logs\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.386600 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnhzc\" (UniqueName: \"kubernetes.io/projected/ffd2c1db-2c19-492f-8783-f03f235013da-kube-api-access-cnhzc\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.386715 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.387213 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.391276 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-logs\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.405407 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.408525 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-scripts\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.412737 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnhzc\" (UniqueName: \"kubernetes.io/projected/ffd2c1db-2c19-492f-8783-f03f235013da-kube-api-access-cnhzc\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.412909 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-config-data\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.428282 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.451542 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") " pod="openstack/glance-default-external-api-0" Sep 29 19:16:07 crc kubenswrapper[4792]: I0929 19:16:07.523041 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:16:08 crc kubenswrapper[4792]: I0929 19:16:08.213509 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:16:09 crc kubenswrapper[4792]: I0929 19:16:09.025082 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ca97462-e055-4294-856f-895f0be44759" path="/var/lib/kubelet/pods/5ca97462-e055-4294-856f-895f0be44759/volumes" Sep 29 19:16:09 crc kubenswrapper[4792]: I0929 19:16:09.846933 4792 generic.go:334] "Generic (PLEG): container finished" podID="b45ae86a-64ce-47be-a568-021cf9da5107" containerID="669c7cc1f802e3c741ab812ebea14d9a705e2cf6d0558e155f535ddf3ec6e8fd" exitCode=0 Sep 29 19:16:09 crc kubenswrapper[4792]: I0929 19:16:09.847013 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-qwkmr" event={"ID":"b45ae86a-64ce-47be-a568-021cf9da5107","Type":"ContainerDied","Data":"669c7cc1f802e3c741ab812ebea14d9a705e2cf6d0558e155f535ddf3ec6e8fd"} Sep 29 19:16:10 crc kubenswrapper[4792]: I0929 19:16:10.428488 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:10 crc kubenswrapper[4792]: I0929 19:16:10.489810 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rh9hv"] Sep 29 19:16:10 crc kubenswrapper[4792]: I0929 19:16:10.498625 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" podUID="fc9bf108-a60d-4111-b064-f37789e2d7c1" containerName="dnsmasq-dns" containerID="cri-o://ac99206b4580e0813cf9ec4a8332cbb6600a529c76e7de3d16dcd402f5c85615" gracePeriod=10 Sep 29 19:16:10 crc kubenswrapper[4792]: I0929 19:16:10.858498 4792 generic.go:334] "Generic (PLEG): container finished" podID="1d12f836-d8e3-46a3-bc92-64dae426f114" containerID="458f42dbafa406e8067bc4be0137d6c9a4b875c15a1507d9300b9941e91ba73f" exitCode=0 Sep 29 19:16:10 crc kubenswrapper[4792]: I0929 19:16:10.858556 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5vlr7" event={"ID":"1d12f836-d8e3-46a3-bc92-64dae426f114","Type":"ContainerDied","Data":"458f42dbafa406e8067bc4be0137d6c9a4b875c15a1507d9300b9941e91ba73f"} Sep 29 19:16:10 crc kubenswrapper[4792]: I0929 19:16:10.860150 4792 generic.go:334] "Generic (PLEG): container finished" podID="3c3fc253-fb19-4845-a099-4754b7a55cdb" containerID="3feeb60329406e8936b0b75192cd78cb94d59aca2dca3a89c2f5d7219e9eeb9a" exitCode=0 Sep 29 19:16:10 crc kubenswrapper[4792]: I0929 19:16:10.860213 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5bsn5" 
event={"ID":"3c3fc253-fb19-4845-a099-4754b7a55cdb","Type":"ContainerDied","Data":"3feeb60329406e8936b0b75192cd78cb94d59aca2dca3a89c2f5d7219e9eeb9a"} Sep 29 19:16:10 crc kubenswrapper[4792]: I0929 19:16:10.861440 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ffd2c1db-2c19-492f-8783-f03f235013da","Type":"ContainerStarted","Data":"96aa30f00143b62ca1e0d28d6cc8c8206861898a50f6f3348a28796f3644dde2"} Sep 29 19:16:10 crc kubenswrapper[4792]: I0929 19:16:10.862993 4792 generic.go:334] "Generic (PLEG): container finished" podID="fc9bf108-a60d-4111-b064-f37789e2d7c1" containerID="ac99206b4580e0813cf9ec4a8332cbb6600a529c76e7de3d16dcd402f5c85615" exitCode=0 Sep 29 19:16:10 crc kubenswrapper[4792]: I0929 19:16:10.863165 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" event={"ID":"fc9bf108-a60d-4111-b064-f37789e2d7c1","Type":"ContainerDied","Data":"ac99206b4580e0813cf9ec4a8332cbb6600a529c76e7de3d16dcd402f5c85615"} Sep 29 19:16:11 crc kubenswrapper[4792]: I0929 19:16:11.617734 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 19:16:11 crc kubenswrapper[4792]: I0929 19:16:11.618075 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 19:16:11 crc kubenswrapper[4792]: I0929 19:16:11.618085 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 19:16:11 crc kubenswrapper[4792]: I0929 19:16:11.618094 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 19:16:11 crc kubenswrapper[4792]: I0929 19:16:11.652152 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 19:16:11 crc kubenswrapper[4792]: I0929 19:16:11.684728 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 19:16:12 crc kubenswrapper[4792]: I0929 19:16:12.963210 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:16:12 crc kubenswrapper[4792]: I0929 19:16:12.963513 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.078670 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.078713 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.857839 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.919713 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-qwkmr" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.923446 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-combined-ca-bundle\") pod \"1d12f836-d8e3-46a3-bc92-64dae426f114\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.923514 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-scripts\") pod \"1d12f836-d8e3-46a3-bc92-64dae426f114\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.923579 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-config-data\") pod \"1d12f836-d8e3-46a3-bc92-64dae426f114\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.923650 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwkft\" (UniqueName: \"kubernetes.io/projected/1d12f836-d8e3-46a3-bc92-64dae426f114-kube-api-access-jwkft\") pod \"1d12f836-d8e3-46a3-bc92-64dae426f114\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.923668 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-fernet-keys\") pod \"1d12f836-d8e3-46a3-bc92-64dae426f114\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.923683 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-credential-keys\") pod \"1d12f836-d8e3-46a3-bc92-64dae426f114\" (UID: \"1d12f836-d8e3-46a3-bc92-64dae426f114\") " Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.928601 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.933061 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d12f836-d8e3-46a3-bc92-64dae426f114-kube-api-access-jwkft" (OuterVolumeSpecName: "kube-api-access-jwkft") pod "1d12f836-d8e3-46a3-bc92-64dae426f114" (UID: "1d12f836-d8e3-46a3-bc92-64dae426f114"). InnerVolumeSpecName "kube-api-access-jwkft". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.934309 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5bsn5" event={"ID":"3c3fc253-fb19-4845-a099-4754b7a55cdb","Type":"ContainerDied","Data":"ed033cb19fc1608719b10c5e51150efc9caaa419f9253fd09ba1deaac8974f3f"} Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.934361 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed033cb19fc1608719b10c5e51150efc9caaa419f9253fd09ba1deaac8974f3f" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.934478 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1d12f836-d8e3-46a3-bc92-64dae426f114" (UID: "1d12f836-d8e3-46a3-bc92-64dae426f114"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.935038 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1d12f836-d8e3-46a3-bc92-64dae426f114" (UID: "1d12f836-d8e3-46a3-bc92-64dae426f114"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.937893 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-scripts" (OuterVolumeSpecName: "scripts") pod "1d12f836-d8e3-46a3-bc92-64dae426f114" (UID: "1d12f836-d8e3-46a3-bc92-64dae426f114"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.954794 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-qwkmr" event={"ID":"b45ae86a-64ce-47be-a568-021cf9da5107","Type":"ContainerDied","Data":"739b1818966f4b471020cf3e4319a81094cd6ecf4857c233adb5432442c0c564"} Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.954827 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="739b1818966f4b471020cf3e4319a81094cd6ecf4857c233adb5432442c0c564" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.955006 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-qwkmr" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.957290 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" podUID="fc9bf108-a60d-4111-b064-f37789e2d7c1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.144:5353: connect: connection refused" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.985329 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-config-data" (OuterVolumeSpecName: "config-data") pod "1d12f836-d8e3-46a3-bc92-64dae426f114" (UID: "1d12f836-d8e3-46a3-bc92-64dae426f114"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.985419 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5vlr7" event={"ID":"1d12f836-d8e3-46a3-bc92-64dae426f114","Type":"ContainerDied","Data":"0efb192d35dd984724a43c372b2c60835d8bc129073df8629d347e3e18914f79"} Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.985508 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5vlr7" Sep 29 19:16:13 crc kubenswrapper[4792]: I0929 19:16:13.985936 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0efb192d35dd984724a43c372b2c60835d8bc129073df8629d347e3e18914f79" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.003142 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d12f836-d8e3-46a3-bc92-64dae426f114" (UID: "1d12f836-d8e3-46a3-bc92-64dae426f114"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.026937 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-combined-ca-bundle\") pod \"b45ae86a-64ce-47be-a568-021cf9da5107\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.027012 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8xfx\" (UniqueName: \"kubernetes.io/projected/b45ae86a-64ce-47be-a568-021cf9da5107-kube-api-access-t8xfx\") pod \"b45ae86a-64ce-47be-a568-021cf9da5107\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.027068 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-combined-ca-bundle\") pod \"3c3fc253-fb19-4845-a099-4754b7a55cdb\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.027164 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-config-data\") pod \"b45ae86a-64ce-47be-a568-021cf9da5107\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.027231 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b45ae86a-64ce-47be-a568-021cf9da5107-logs\") pod \"b45ae86a-64ce-47be-a568-021cf9da5107\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.027320 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-scripts\") pod \"b45ae86a-64ce-47be-a568-021cf9da5107\" (UID: \"b45ae86a-64ce-47be-a568-021cf9da5107\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.027352 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxgsk\" (UniqueName: 
\"kubernetes.io/projected/3c3fc253-fb19-4845-a099-4754b7a55cdb-kube-api-access-fxgsk\") pod \"3c3fc253-fb19-4845-a099-4754b7a55cdb\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.027379 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-db-sync-config-data\") pod \"3c3fc253-fb19-4845-a099-4754b7a55cdb\" (UID: \"3c3fc253-fb19-4845-a099-4754b7a55cdb\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.030457 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.030495 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.030512 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.030522 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwkft\" (UniqueName: \"kubernetes.io/projected/1d12f836-d8e3-46a3-bc92-64dae426f114-kube-api-access-jwkft\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.030532 4792 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.030541 4792 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1d12f836-d8e3-46a3-bc92-64dae426f114-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.055172 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-scripts" (OuterVolumeSpecName: "scripts") pod "b45ae86a-64ce-47be-a568-021cf9da5107" (UID: "b45ae86a-64ce-47be-a568-021cf9da5107"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.055511 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b45ae86a-64ce-47be-a568-021cf9da5107-logs" (OuterVolumeSpecName: "logs") pod "b45ae86a-64ce-47be-a568-021cf9da5107" (UID: "b45ae86a-64ce-47be-a568-021cf9da5107"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.059969 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c3fc253-fb19-4845-a099-4754b7a55cdb-kube-api-access-fxgsk" (OuterVolumeSpecName: "kube-api-access-fxgsk") pod "3c3fc253-fb19-4845-a099-4754b7a55cdb" (UID: "3c3fc253-fb19-4845-a099-4754b7a55cdb"). InnerVolumeSpecName "kube-api-access-fxgsk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.080988 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b45ae86a-64ce-47be-a568-021cf9da5107-kube-api-access-t8xfx" (OuterVolumeSpecName: "kube-api-access-t8xfx") pod "b45ae86a-64ce-47be-a568-021cf9da5107" (UID: "b45ae86a-64ce-47be-a568-021cf9da5107"). InnerVolumeSpecName "kube-api-access-t8xfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.087157 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b45ae86a-64ce-47be-a568-021cf9da5107" (UID: "b45ae86a-64ce-47be-a568-021cf9da5107"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.090148 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3c3fc253-fb19-4845-a099-4754b7a55cdb" (UID: "3c3fc253-fb19-4845-a099-4754b7a55cdb"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.132213 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8xfx\" (UniqueName: \"kubernetes.io/projected/b45ae86a-64ce-47be-a568-021cf9da5107-kube-api-access-t8xfx\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.134899 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b45ae86a-64ce-47be-a568-021cf9da5107-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.134981 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.135038 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxgsk\" (UniqueName: \"kubernetes.io/projected/3c3fc253-fb19-4845-a099-4754b7a55cdb-kube-api-access-fxgsk\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.135137 4792 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.135208 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.145084 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-config-data" (OuterVolumeSpecName: "config-data") pod "b45ae86a-64ce-47be-a568-021cf9da5107" (UID: "b45ae86a-64ce-47be-a568-021cf9da5107"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.149126 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c3fc253-fb19-4845-a099-4754b7a55cdb" (UID: "3c3fc253-fb19-4845-a099-4754b7a55cdb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.240645 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c3fc253-fb19-4845-a099-4754b7a55cdb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.240692 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b45ae86a-64ce-47be-a568-021cf9da5107-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.246109 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.341946 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-config\") pod \"fc9bf108-a60d-4111-b064-f37789e2d7c1\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.342274 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-swift-storage-0\") pod \"fc9bf108-a60d-4111-b064-f37789e2d7c1\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.342626 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-svc\") pod \"fc9bf108-a60d-4111-b064-f37789e2d7c1\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.342684 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-nb\") pod \"fc9bf108-a60d-4111-b064-f37789e2d7c1\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.342725 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stxf8\" (UniqueName: \"kubernetes.io/projected/fc9bf108-a60d-4111-b064-f37789e2d7c1-kube-api-access-stxf8\") pod \"fc9bf108-a60d-4111-b064-f37789e2d7c1\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.342742 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-sb\") pod \"fc9bf108-a60d-4111-b064-f37789e2d7c1\" (UID: \"fc9bf108-a60d-4111-b064-f37789e2d7c1\") " Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.361271 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/fc9bf108-a60d-4111-b064-f37789e2d7c1-kube-api-access-stxf8" (OuterVolumeSpecName: "kube-api-access-stxf8") pod "fc9bf108-a60d-4111-b064-f37789e2d7c1" (UID: "fc9bf108-a60d-4111-b064-f37789e2d7c1"). InnerVolumeSpecName "kube-api-access-stxf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.418574 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-config" (OuterVolumeSpecName: "config") pod "fc9bf108-a60d-4111-b064-f37789e2d7c1" (UID: "fc9bf108-a60d-4111-b064-f37789e2d7c1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.427490 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fc9bf108-a60d-4111-b064-f37789e2d7c1" (UID: "fc9bf108-a60d-4111-b064-f37789e2d7c1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.438770 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fc9bf108-a60d-4111-b064-f37789e2d7c1" (UID: "fc9bf108-a60d-4111-b064-f37789e2d7c1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.448906 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.448936 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stxf8\" (UniqueName: \"kubernetes.io/projected/fc9bf108-a60d-4111-b064-f37789e2d7c1-kube-api-access-stxf8\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.448949 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.448958 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.454272 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fc9bf108-a60d-4111-b064-f37789e2d7c1" (UID: "fc9bf108-a60d-4111-b064-f37789e2d7c1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.466311 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fc9bf108-a60d-4111-b064-f37789e2d7c1" (UID: "fc9bf108-a60d-4111-b064-f37789e2d7c1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.550315 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:14 crc kubenswrapper[4792]: I0929 19:16:14.550347 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc9bf108-a60d-4111-b064-f37789e2d7c1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.009319 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cvlgm" event={"ID":"bd0405ab-8be9-41cd-aa4d-7cbe44be3049","Type":"ContainerStarted","Data":"d603761937a93c10f270ab88e50f1fcce9c85642279a25c8fc9caf4866875be0"} Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.030684 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.042963 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ffd2c1db-2c19-492f-8783-f03f235013da","Type":"ContainerStarted","Data":"803f4d15c0202056158ec73d27818f3ea16ced3e14acb9c68a9e4775c034a304"} Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043002 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-rh9hv" event={"ID":"fc9bf108-a60d-4111-b064-f37789e2d7c1","Type":"ContainerDied","Data":"f0e9707349865bb460b60a1ff263783a2be29ca2beeaef17326ad96c5c93bf56"} Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043020 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-754c4b8fcb-w2t8n"] Sep 29 19:16:15 crc kubenswrapper[4792]: E0929 19:16:15.043332 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c3fc253-fb19-4845-a099-4754b7a55cdb" containerName="barbican-db-sync" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043344 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c3fc253-fb19-4845-a099-4754b7a55cdb" containerName="barbican-db-sync" Sep 29 19:16:15 crc kubenswrapper[4792]: E0929 19:16:15.043356 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d12f836-d8e3-46a3-bc92-64dae426f114" containerName="keystone-bootstrap" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043363 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d12f836-d8e3-46a3-bc92-64dae426f114" containerName="keystone-bootstrap" Sep 29 19:16:15 crc kubenswrapper[4792]: E0929 19:16:15.043388 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b45ae86a-64ce-47be-a568-021cf9da5107" containerName="placement-db-sync" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043394 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b45ae86a-64ce-47be-a568-021cf9da5107" containerName="placement-db-sync" Sep 29 19:16:15 crc kubenswrapper[4792]: E0929 19:16:15.043408 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc9bf108-a60d-4111-b064-f37789e2d7c1" containerName="dnsmasq-dns" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043415 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc9bf108-a60d-4111-b064-f37789e2d7c1" containerName="dnsmasq-dns" Sep 29 19:16:15 crc kubenswrapper[4792]: E0929 19:16:15.043457 4792 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="fc9bf108-a60d-4111-b064-f37789e2d7c1" containerName="init" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043470 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc9bf108-a60d-4111-b064-f37789e2d7c1" containerName="init" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043625 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d12f836-d8e3-46a3-bc92-64dae426f114" containerName="keystone-bootstrap" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043638 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b45ae86a-64ce-47be-a568-021cf9da5107" containerName="placement-db-sync" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043653 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc9bf108-a60d-4111-b064-f37789e2d7c1" containerName="dnsmasq-dns" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.043668 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c3fc253-fb19-4845-a099-4754b7a55cdb" containerName="barbican-db-sync" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.044185 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.049113 4792 scope.go:117] "RemoveContainer" containerID="ac99206b4580e0813cf9ec4a8332cbb6600a529c76e7de3d16dcd402f5c85615" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.059126 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5bsn5" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.060313 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.060509 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.060612 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.060717 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.061857 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-58chc" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.062113 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"06538688-0bb7-45ae-a249-94ba5c312b2b","Type":"ContainerStarted","Data":"cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1"} Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.062272 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.081732 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-cvlgm" podStartSLOduration=3.800681196 podStartE2EDuration="53.081714462s" podCreationTimestamp="2025-09-29 19:15:22 +0000 UTC" firstStartedPulling="2025-09-29 19:15:24.66619762 +0000 UTC m=+1136.659505016" lastFinishedPulling="2025-09-29 19:16:13.947230886 +0000 UTC m=+1185.940538282" observedRunningTime="2025-09-29 19:16:15.059824448 +0000 UTC m=+1187.053131864" watchObservedRunningTime="2025-09-29 19:16:15.081714462 +0000 UTC 
m=+1187.075021858" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.083015 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-754c4b8fcb-w2t8n"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.140923 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rh9hv"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.149343 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rh9hv"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.175733 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-credential-keys\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.175788 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v4m8\" (UniqueName: \"kubernetes.io/projected/a4b671bb-328e-401e-933f-665848067860-kube-api-access-4v4m8\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.175857 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-internal-tls-certs\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.175897 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-combined-ca-bundle\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.175935 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-scripts\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.176227 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-public-tls-certs\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.176268 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-fernet-keys\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.176295 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-config-data\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.189354 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-666f667548-cm9kb"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.192842 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.201420 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.201632 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.201734 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-5jnkz" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.201966 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.202452 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.222794 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-666f667548-cm9kb"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.261654 4792 scope.go:117] "RemoveContainer" containerID="0a4ebe62cd782eb9c1393c70cccbc338fd92640d7163d46a149daaaf9db2d5db" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281280 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nq6b\" (UniqueName: \"kubernetes.io/projected/d463ed77-f74f-4724-b942-1f542755d4d4-kube-api-access-7nq6b\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281339 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-internal-tls-certs\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281396 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-combined-ca-bundle\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281427 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-combined-ca-bundle\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281461 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-config-data\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281497 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-scripts\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281547 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-public-tls-certs\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281579 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-internal-tls-certs\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281649 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d463ed77-f74f-4724-b942-1f542755d4d4-logs\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281683 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-scripts\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281715 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-public-tls-certs\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281774 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-fernet-keys\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281798 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-config-data\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281843 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-credential-keys\") pod 
\"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.281898 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v4m8\" (UniqueName: \"kubernetes.io/projected/a4b671bb-328e-401e-933f-665848067860-kube-api-access-4v4m8\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.293281 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-internal-tls-certs\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.297088 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-public-tls-certs\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.320814 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5697845c85-cnq66"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.322466 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.324340 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-combined-ca-bundle\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.326142 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-scripts\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.326313 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-credential-keys\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.327073 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-fernet-keys\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.330017 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4b671bb-328e-401e-933f-665848067860-config-data\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.359038 4792 
reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.359381 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-pkvl7" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.359444 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385057 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-public-tls-certs\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385099 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-internal-tls-certs\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385129 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-combined-ca-bundle\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385160 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q69kq\" (UniqueName: \"kubernetes.io/projected/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-kube-api-access-q69kq\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385195 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d463ed77-f74f-4724-b942-1f542755d4d4-logs\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385216 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-scripts\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385277 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nq6b\" (UniqueName: \"kubernetes.io/projected/d463ed77-f74f-4724-b942-1f542755d4d4-kube-api-access-7nq6b\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385295 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-config-data\") pod \"barbican-worker-5697845c85-cnq66\" (UID: 
\"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385323 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-config-data-custom\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385345 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-combined-ca-bundle\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385371 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-config-data\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385399 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-logs\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.385573 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v4m8\" (UniqueName: \"kubernetes.io/projected/a4b671bb-328e-401e-933f-665848067860-kube-api-access-4v4m8\") pod \"keystone-754c4b8fcb-w2t8n\" (UID: \"a4b671bb-328e-401e-933f-665848067860\") " pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.386225 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d463ed77-f74f-4724-b942-1f542755d4d4-logs\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.387294 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5697845c85-cnq66"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.388924 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-public-tls-certs\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.396357 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.397393 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-config-data\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.415673 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6864b589b6-rj9q8"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.425517 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-scripts\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.430357 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.430574 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-internal-tls-certs\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.440030 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.441787 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6864b589b6-rj9q8"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.446762 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nq6b\" (UniqueName: \"kubernetes.io/projected/d463ed77-f74f-4724-b942-1f542755d4d4-kube-api-access-7nq6b\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.458599 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d463ed77-f74f-4724-b942-1f542755d4d4-combined-ca-bundle\") pod \"placement-666f667548-cm9kb\" (UID: \"d463ed77-f74f-4724-b942-1f542755d4d4\") " pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.487770 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-config-data\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.487856 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-config-data\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 
19:16:15.487879 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-config-data-custom\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.487906 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-config-data-custom\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.488040 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-logs\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.488076 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-combined-ca-bundle\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.488101 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-combined-ca-bundle\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.488120 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q69kq\" (UniqueName: \"kubernetes.io/projected/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-kube-api-access-q69kq\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.488175 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk476\" (UniqueName: \"kubernetes.io/projected/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-kube-api-access-tk476\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.488200 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-logs\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.489925 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-logs\") pod 
\"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.501420 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-config-data-custom\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.504281 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-combined-ca-bundle\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.535102 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.589599 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q69kq\" (UniqueName: \"kubernetes.io/projected/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-kube-api-access-q69kq\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.599982 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-combined-ca-bundle\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.600212 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk476\" (UniqueName: \"kubernetes.io/projected/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-kube-api-access-tk476\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.600325 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-logs\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.600501 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-config-data\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.600543 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-config-data-custom\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " 
pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.602306 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-logs\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.604701 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-combined-ca-bundle\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.638927 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-kqs7x"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.641406 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk476\" (UniqueName: \"kubernetes.io/projected/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-kube-api-access-tk476\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.641530 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.662885 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-config-data-custom\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.668134 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb-config-data\") pod \"barbican-keystone-listener-6864b589b6-rj9q8\" (UID: \"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb\") " pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.689945 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4ee74b8-8ac4-4a34-967b-6fcb220e90fa-config-data\") pod \"barbican-worker-5697845c85-cnq66\" (UID: \"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa\") " pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.744781 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-kqs7x"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.791577 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7d954476d6-ngzb5"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.793456 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.835870 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.835935 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.836001 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w7jn\" (UniqueName: \"kubernetes.io/projected/2465ec19-7e92-4be1-9a98-3d22a1553089-kube-api-access-9w7jn\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.836077 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-config\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.836163 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.836198 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.836428 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.837292 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.837767 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5697845c85-cnq66" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.879044 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7d954476d6-ngzb5"] Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940000 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7602636e-8093-4eba-90f6-d5fbe8273356-logs\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940048 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data-custom\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940078 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-config\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940347 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940379 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940415 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940434 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940459 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-combined-ca-bundle\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940476 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-8jtn9\" (UniqueName: \"kubernetes.io/projected/7602636e-8093-4eba-90f6-d5fbe8273356-kube-api-access-8jtn9\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940504 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.940522 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w7jn\" (UniqueName: \"kubernetes.io/projected/2465ec19-7e92-4be1-9a98-3d22a1553089-kube-api-access-9w7jn\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.943456 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.943463 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.944040 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-config\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.944817 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:15 crc kubenswrapper[4792]: I0929 19:16:15.950559 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.021406 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w7jn\" (UniqueName: \"kubernetes.io/projected/2465ec19-7e92-4be1-9a98-3d22a1553089-kube-api-access-9w7jn\") pod \"dnsmasq-dns-848cf88cfc-kqs7x\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.045081 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/7602636e-8093-4eba-90f6-d5fbe8273356-logs\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.045149 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data-custom\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.045237 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-combined-ca-bundle\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.045255 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jtn9\" (UniqueName: \"kubernetes.io/projected/7602636e-8093-4eba-90f6-d5fbe8273356-kube-api-access-8jtn9\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.045286 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.047151 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7602636e-8093-4eba-90f6-d5fbe8273356-logs\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.053461 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.085629 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data-custom\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.087085 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-combined-ca-bundle\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.093923 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jtn9\" (UniqueName: 
\"kubernetes.io/projected/7602636e-8093-4eba-90f6-d5fbe8273356-kube-api-access-8jtn9\") pod \"barbican-api-7d954476d6-ngzb5\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.147249 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ffd2c1db-2c19-492f-8783-f03f235013da","Type":"ContainerStarted","Data":"296697811ab6aba68cfb065cda6c785c1c9a617b06fde3730edfd00ed8afd010"} Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.179989 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.224393 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.224371395 podStartE2EDuration="9.224371395s" podCreationTimestamp="2025-09-29 19:16:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:16.220991355 +0000 UTC m=+1188.214298751" watchObservedRunningTime="2025-09-29 19:16:16.224371395 +0000 UTC m=+1188.217678821" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.272681 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.627459 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-754c4b8fcb-w2t8n"] Sep 29 19:16:16 crc kubenswrapper[4792]: I0929 19:16:16.845428 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-666f667548-cm9kb"] Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.070367 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc9bf108-a60d-4111-b064-f37789e2d7c1" path="/var/lib/kubelet/pods/fc9bf108-a60d-4111-b064-f37789e2d7c1/volumes" Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.227723 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-754c4b8fcb-w2t8n" event={"ID":"a4b671bb-328e-401e-933f-665848067860","Type":"ContainerStarted","Data":"3599cd0d17cf97fae30ee1bfbbcb1b5fd3d351cada374458466942c0d0d1ee55"} Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.244129 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-666f667548-cm9kb" event={"ID":"d463ed77-f74f-4724-b942-1f542755d4d4","Type":"ContainerStarted","Data":"a6c601ca82f6ac5969ebefe8b2e8e369525d91e123d275aa968a54266a6103c1"} Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.520888 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7d954476d6-ngzb5"] Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.524322 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.524367 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.611454 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5697845c85-cnq66"] Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.643871 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/barbican-keystone-listener-6864b589b6-rj9q8"] Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.671280 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-kqs7x"] Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.682971 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 19:16:17 crc kubenswrapper[4792]: I0929 19:16:17.759139 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.274943 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" event={"ID":"2465ec19-7e92-4be1-9a98-3d22a1553089","Type":"ContainerStarted","Data":"0881e694285eaf97fcb6b371d228d5ef5b5aa7fdfb004571a453cdc2fd63d593"} Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.282803 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-754c4b8fcb-w2t8n" event={"ID":"a4b671bb-328e-401e-933f-665848067860","Type":"ContainerStarted","Data":"ee53f7635ff2cb219cdd6514d2643fd6719e0f7359bd4eecb15a6e5650d6b299"} Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.282909 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.299280 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-666f667548-cm9kb" event={"ID":"d463ed77-f74f-4724-b942-1f542755d4d4","Type":"ContainerStarted","Data":"453d483ea0864a2d14494f2aeabde69c87a78ab931368c1031d3b1a4eb8a39c9"} Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.311064 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-754c4b8fcb-w2t8n" podStartSLOduration=4.311038076 podStartE2EDuration="4.311038076s" podCreationTimestamp="2025-09-29 19:16:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:18.301887751 +0000 UTC m=+1190.295195157" watchObservedRunningTime="2025-09-29 19:16:18.311038076 +0000 UTC m=+1190.304345482" Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.321797 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5697845c85-cnq66" event={"ID":"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa","Type":"ContainerStarted","Data":"a9bec84d46728016a411ed04e19dec7da81edc1d85f31dbd6c9fe441771c2048"} Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.326959 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d954476d6-ngzb5" event={"ID":"7602636e-8093-4eba-90f6-d5fbe8273356","Type":"ContainerStarted","Data":"1b1149a36c8fd58117aa4fc036a15d384bb0b1b06a5eb4e83ef3cada9cc2481f"} Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.337774 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" event={"ID":"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb","Type":"ContainerStarted","Data":"b21b2e7850eb179e2e94efb97a55bcd8be1fd77c29ebf6c40d2dcc2945f68ed2"} Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.337810 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.337829 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-external-api-0" Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.729470 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.730072 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 19:16:18 crc kubenswrapper[4792]: I0929 19:16:18.736020 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.360001 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d954476d6-ngzb5" event={"ID":"7602636e-8093-4eba-90f6-d5fbe8273356","Type":"ContainerStarted","Data":"270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c"} Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.360312 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d954476d6-ngzb5" event={"ID":"7602636e-8093-4eba-90f6-d5fbe8273356","Type":"ContainerStarted","Data":"7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b"} Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.360365 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.360385 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.368240 4792 generic.go:334] "Generic (PLEG): container finished" podID="2465ec19-7e92-4be1-9a98-3d22a1553089" containerID="76b06e995a838dbc8a199d889e6a5c6c0114e28fed3eb5c8041aa9c81caac7b1" exitCode=0 Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.368299 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" event={"ID":"2465ec19-7e92-4be1-9a98-3d22a1553089","Type":"ContainerDied","Data":"76b06e995a838dbc8a199d889e6a5c6c0114e28fed3eb5c8041aa9c81caac7b1"} Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.374188 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-666f667548-cm9kb" event={"ID":"d463ed77-f74f-4724-b942-1f542755d4d4","Type":"ContainerStarted","Data":"d24536446140118f2a104d82ee822ef0c06c15a8c5b766cb932c849c553f3261"} Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.374235 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.376483 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.403451 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7d954476d6-ngzb5" podStartSLOduration=4.403429399 podStartE2EDuration="4.403429399s" podCreationTimestamp="2025-09-29 19:16:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:19.376957383 +0000 UTC m=+1191.370264779" watchObservedRunningTime="2025-09-29 19:16:19.403429399 +0000 UTC m=+1191.396736795" Sep 29 19:16:19 crc kubenswrapper[4792]: I0929 19:16:19.476247 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-666f667548-cm9kb" podStartSLOduration=4.476228531 
podStartE2EDuration="4.476228531s" podCreationTimestamp="2025-09-29 19:16:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:19.423023682 +0000 UTC m=+1191.416331078" watchObservedRunningTime="2025-09-29 19:16:19.476228531 +0000 UTC m=+1191.469535927" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.368596 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-65c5d84686-mddqt"] Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.370303 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.375672 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.376029 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.398318 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-65c5d84686-mddqt"] Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.401340 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" event={"ID":"2465ec19-7e92-4be1-9a98-3d22a1553089","Type":"ContainerStarted","Data":"0d01e38b3049ecd69e2b514251034484d0b6f9c288a416a91d536117758d34fa"} Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.401717 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.457790 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" podStartSLOduration=5.457771507 podStartE2EDuration="5.457771507s" podCreationTimestamp="2025-09-29 19:16:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:20.455318071 +0000 UTC m=+1192.448625477" watchObservedRunningTime="2025-09-29 19:16:20.457771507 +0000 UTC m=+1192.451078903" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.531478 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk795\" (UniqueName: \"kubernetes.io/projected/9edbf81b-9313-4a4c-8dd0-b29b82f32888-kube-api-access-lk795\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.531520 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-combined-ca-bundle\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.531547 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-internal-tls-certs\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc 
kubenswrapper[4792]: I0929 19:16:20.531631 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-public-tls-certs\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.531775 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-config-data\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.531887 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9edbf81b-9313-4a4c-8dd0-b29b82f32888-logs\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.531902 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-config-data-custom\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.633429 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-config-data\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.633543 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9edbf81b-9313-4a4c-8dd0-b29b82f32888-logs\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.633561 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-config-data-custom\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.633593 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk795\" (UniqueName: \"kubernetes.io/projected/9edbf81b-9313-4a4c-8dd0-b29b82f32888-kube-api-access-lk795\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.633609 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-combined-ca-bundle\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 
19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.633629 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-internal-tls-certs\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.633666 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-public-tls-certs\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.637920 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9edbf81b-9313-4a4c-8dd0-b29b82f32888-logs\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.646597 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-config-data-custom\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.647747 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-config-data\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.651043 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-combined-ca-bundle\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.654676 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-public-tls-certs\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.670739 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9edbf81b-9313-4a4c-8dd0-b29b82f32888-internal-tls-certs\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.690900 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk795\" (UniqueName: \"kubernetes.io/projected/9edbf81b-9313-4a4c-8dd0-b29b82f32888-kube-api-access-lk795\") pod \"barbican-api-65c5d84686-mddqt\" (UID: \"9edbf81b-9313-4a4c-8dd0-b29b82f32888\") " pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:20 crc kubenswrapper[4792]: I0929 19:16:20.987472 4792 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:22 crc kubenswrapper[4792]: I0929 19:16:22.963948 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-dfd9c6b56-wq84c" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Sep 29 19:16:23 crc kubenswrapper[4792]: I0929 19:16:23.090652 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8494dffd6-7rx5p" podUID="23845288-b122-49f0-b10d-641cfb94b66f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Sep 29 19:16:23 crc kubenswrapper[4792]: I0929 19:16:23.307383 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-65c5d84686-mddqt"] Sep 29 19:16:23 crc kubenswrapper[4792]: I0929 19:16:23.435918 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" event={"ID":"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb","Type":"ContainerStarted","Data":"61d16fc3d824d072dfc66bd4e4f1b465ff945c511f1f8186de969f9bc807aba1"} Sep 29 19:16:23 crc kubenswrapper[4792]: I0929 19:16:23.435964 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" event={"ID":"799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb","Type":"ContainerStarted","Data":"3f7aecb7ad0387f65c5421f14abc82c97bab608ef11b19a2f6d1de96038342ce"} Sep 29 19:16:23 crc kubenswrapper[4792]: I0929 19:16:23.442574 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65c5d84686-mddqt" event={"ID":"9edbf81b-9313-4a4c-8dd0-b29b82f32888","Type":"ContainerStarted","Data":"7699f9b930b5d484636b429336b33fdfe990afa915d99355c219f84d13ed45da"} Sep 29 19:16:23 crc kubenswrapper[4792]: I0929 19:16:23.455516 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5697845c85-cnq66" event={"ID":"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa","Type":"ContainerStarted","Data":"3088a9fda4104b9a46bb561ab26cd88f121aef9b15978a0ae8d0431d56519334"} Sep 29 19:16:23 crc kubenswrapper[4792]: I0929 19:16:23.455553 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5697845c85-cnq66" event={"ID":"c4ee74b8-8ac4-4a34-967b-6fcb220e90fa","Type":"ContainerStarted","Data":"ef325a0b80e0314eabc0d111f721e88b7cf9b86fed7ef14bd7aa53678f62fd12"} Sep 29 19:16:23 crc kubenswrapper[4792]: I0929 19:16:23.460504 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6864b589b6-rj9q8" podStartSLOduration=3.560288152 podStartE2EDuration="8.460487297s" podCreationTimestamp="2025-09-29 19:16:15 +0000 UTC" firstStartedPulling="2025-09-29 19:16:17.648120956 +0000 UTC m=+1189.641428352" lastFinishedPulling="2025-09-29 19:16:22.548320101 +0000 UTC m=+1194.541627497" observedRunningTime="2025-09-29 19:16:23.456027928 +0000 UTC m=+1195.449335324" watchObservedRunningTime="2025-09-29 19:16:23.460487297 +0000 UTC m=+1195.453794693" Sep 29 19:16:23 crc kubenswrapper[4792]: I0929 19:16:23.486252 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5697845c85-cnq66" podStartSLOduration=3.5776641160000002 podStartE2EDuration="8.486231764s" 
podCreationTimestamp="2025-09-29 19:16:15 +0000 UTC" firstStartedPulling="2025-09-29 19:16:17.63813306 +0000 UTC m=+1189.631440456" lastFinishedPulling="2025-09-29 19:16:22.546700708 +0000 UTC m=+1194.540008104" observedRunningTime="2025-09-29 19:16:23.478407585 +0000 UTC m=+1195.471714981" watchObservedRunningTime="2025-09-29 19:16:23.486231764 +0000 UTC m=+1195.479539160" Sep 29 19:16:24 crc kubenswrapper[4792]: I0929 19:16:24.447501 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 19:16:24 crc kubenswrapper[4792]: I0929 19:16:24.467388 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65c5d84686-mddqt" event={"ID":"9edbf81b-9313-4a4c-8dd0-b29b82f32888","Type":"ContainerStarted","Data":"e783e24ec93c8368de2a1b2e18c9e88fa2434a2f1c4897ce4946a320e6822541"} Sep 29 19:16:24 crc kubenswrapper[4792]: I0929 19:16:24.467424 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65c5d84686-mddqt" event={"ID":"9edbf81b-9313-4a4c-8dd0-b29b82f32888","Type":"ContainerStarted","Data":"3dc8e43ce2fdc045a84b1c5e832fcac1b8032450d80e386ee2b1f8e2cf263904"} Sep 29 19:16:24 crc kubenswrapper[4792]: I0929 19:16:24.468203 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:24 crc kubenswrapper[4792]: I0929 19:16:24.468264 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:24 crc kubenswrapper[4792]: I0929 19:16:24.468669 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 19:16:24 crc kubenswrapper[4792]: I0929 19:16:24.490124 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-65c5d84686-mddqt" podStartSLOduration=4.490109887 podStartE2EDuration="4.490109887s" podCreationTimestamp="2025-09-29 19:16:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:24.485408771 +0000 UTC m=+1196.478716177" watchObservedRunningTime="2025-09-29 19:16:24.490109887 +0000 UTC m=+1196.483417283" Sep 29 19:16:26 crc kubenswrapper[4792]: I0929 19:16:26.276966 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:26 crc kubenswrapper[4792]: I0929 19:16:26.398455 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-6qkxx"] Sep 29 19:16:26 crc kubenswrapper[4792]: I0929 19:16:26.398818 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" podUID="b7b5b809-50d5-467e-9faf-3d4398f81b15" containerName="dnsmasq-dns" containerID="cri-o://35b9217f49e1a490d6c94be56e7b3250f2a452ab9417107400a369fa0c6e4228" gracePeriod=10 Sep 29 19:16:27 crc kubenswrapper[4792]: I0929 19:16:27.523650 4792 generic.go:334] "Generic (PLEG): container finished" podID="b7b5b809-50d5-467e-9faf-3d4398f81b15" containerID="35b9217f49e1a490d6c94be56e7b3250f2a452ab9417107400a369fa0c6e4228" exitCode=0 Sep 29 19:16:27 crc kubenswrapper[4792]: I0929 19:16:27.523841 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" 
event={"ID":"b7b5b809-50d5-467e-9faf-3d4398f81b15","Type":"ContainerDied","Data":"35b9217f49e1a490d6c94be56e7b3250f2a452ab9417107400a369fa0c6e4228"} Sep 29 19:16:28 crc kubenswrapper[4792]: I0929 19:16:28.534190 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cvlgm" event={"ID":"bd0405ab-8be9-41cd-aa4d-7cbe44be3049","Type":"ContainerDied","Data":"d603761937a93c10f270ab88e50f1fcce9c85642279a25c8fc9caf4866875be0"} Sep 29 19:16:28 crc kubenswrapper[4792]: I0929 19:16:28.534213 4792 generic.go:334] "Generic (PLEG): container finished" podID="bd0405ab-8be9-41cd-aa4d-7cbe44be3049" containerID="d603761937a93c10f270ab88e50f1fcce9c85642279a25c8fc9caf4866875be0" exitCode=0 Sep 29 19:16:28 crc kubenswrapper[4792]: I0929 19:16:28.630003 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:28 crc kubenswrapper[4792]: I0929 19:16:28.953588 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:30 crc kubenswrapper[4792]: I0929 19:16:30.421404 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" podUID="b7b5b809-50d5-467e-9faf-3d4398f81b15" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.154:5353: connect: connection refused" Sep 29 19:16:30 crc kubenswrapper[4792]: I0929 19:16:30.579728 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.600781 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cvlgm" event={"ID":"bd0405ab-8be9-41cd-aa4d-7cbe44be3049","Type":"ContainerDied","Data":"ed5d3e8c6df3780d0c885ca3d0c7b56e23a627ba00fb6f6824b6ebcaca94bd85"} Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.601104 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed5d3e8c6df3780d0c885ca3d0c7b56e23a627ba00fb6f6824b6ebcaca94bd85" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.635963 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.717382 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-db-sync-config-data\") pod \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.717502 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-config-data\") pod \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.717540 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-combined-ca-bundle\") pod \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.717587 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-etc-machine-id\") pod \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.717603 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcl6j\" (UniqueName: \"kubernetes.io/projected/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-kube-api-access-hcl6j\") pod \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.717624 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-scripts\") pod \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\" (UID: \"bd0405ab-8be9-41cd-aa4d-7cbe44be3049\") " Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.717986 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bd0405ab-8be9-41cd-aa4d-7cbe44be3049" (UID: "bd0405ab-8be9-41cd-aa4d-7cbe44be3049"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.729194 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-kube-api-access-hcl6j" (OuterVolumeSpecName: "kube-api-access-hcl6j") pod "bd0405ab-8be9-41cd-aa4d-7cbe44be3049" (UID: "bd0405ab-8be9-41cd-aa4d-7cbe44be3049"). InnerVolumeSpecName "kube-api-access-hcl6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.764820 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-scripts" (OuterVolumeSpecName: "scripts") pod "bd0405ab-8be9-41cd-aa4d-7cbe44be3049" (UID: "bd0405ab-8be9-41cd-aa4d-7cbe44be3049"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.765042 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "bd0405ab-8be9-41cd-aa4d-7cbe44be3049" (UID: "bd0405ab-8be9-41cd-aa4d-7cbe44be3049"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.792009 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd0405ab-8be9-41cd-aa4d-7cbe44be3049" (UID: "bd0405ab-8be9-41cd-aa4d-7cbe44be3049"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.819522 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.819548 4792 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.819558 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcl6j\" (UniqueName: \"kubernetes.io/projected/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-kube-api-access-hcl6j\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.819566 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.819576 4792 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.829123 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-config-data" (OuterVolumeSpecName: "config-data") pod "bd0405ab-8be9-41cd-aa4d-7cbe44be3049" (UID: "bd0405ab-8be9-41cd-aa4d-7cbe44be3049"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:31 crc kubenswrapper[4792]: I0929 19:16:31.922070 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd0405ab-8be9-41cd-aa4d-7cbe44be3049-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:32 crc kubenswrapper[4792]: I0929 19:16:32.609306 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-cvlgm" Sep 29 19:16:32 crc kubenswrapper[4792]: I0929 19:16:32.991472 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-dfd9c6b56-wq84c" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Sep 29 19:16:32 crc kubenswrapper[4792]: I0929 19:16:32.994909 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:16:32 crc kubenswrapper[4792]: E0929 19:16:32.995977 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd0405ab-8be9-41cd-aa4d-7cbe44be3049" containerName="cinder-db-sync" Sep 29 19:16:32 crc kubenswrapper[4792]: I0929 19:16:32.995993 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd0405ab-8be9-41cd-aa4d-7cbe44be3049" containerName="cinder-db-sync" Sep 29 19:16:32 crc kubenswrapper[4792]: I0929 19:16:32.996335 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd0405ab-8be9-41cd-aa4d-7cbe44be3049" containerName="cinder-db-sync" Sep 29 19:16:32 crc kubenswrapper[4792]: I0929 19:16:32.998536 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.006547 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.006801 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.007285 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.007440 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-xqr5j" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.049769 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.083810 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8494dffd6-7rx5p" podUID="23845288-b122-49f0-b10d-641cfb94b66f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.099459 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-jn4q7"] Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.101042 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.107762 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-jn4q7"] Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.165237 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4c4f17e5-0900-434c-8110-89e781430ded-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.165282 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-scripts\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.165303 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.165320 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7fwg\" (UniqueName: \"kubernetes.io/projected/4c4f17e5-0900-434c-8110-89e781430ded-kube-api-access-h7fwg\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.165387 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.165409 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267569 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-svc\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267609 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqbzq\" (UniqueName: \"kubernetes.io/projected/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-kube-api-access-gqbzq\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267672 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/4c4f17e5-0900-434c-8110-89e781430ded-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267700 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-scripts\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267734 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-config\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267755 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267771 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7fwg\" (UniqueName: \"kubernetes.io/projected/4c4f17e5-0900-434c-8110-89e781430ded-kube-api-access-h7fwg\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267798 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267815 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267871 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267898 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.267919 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data-custom\") pod 
\"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.270085 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4c4f17e5-0900-434c-8110-89e781430ded-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.284063 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.284520 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.284946 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.292240 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-scripts\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.292622 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7fwg\" (UniqueName: \"kubernetes.io/projected/4c4f17e5-0900-434c-8110-89e781430ded-kube-api-access-h7fwg\") pod \"cinder-scheduler-0\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.329560 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.336790 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.337301 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.349172 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.359501 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.370929 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-config\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.370991 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.371013 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.371062 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.371115 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-svc\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.371132 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqbzq\" (UniqueName: \"kubernetes.io/projected/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-kube-api-access-gqbzq\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.372805 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-config\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.374088 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.375052 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-svc\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.375641 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.379471 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.410886 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqbzq\" (UniqueName: \"kubernetes.io/projected/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-kube-api-access-gqbzq\") pod \"dnsmasq-dns-6578955fd5-jn4q7\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.448236 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.469491 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.472364 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dks65\" (UniqueName: \"kubernetes.io/projected/30fffd32-d307-47ec-b239-aeb8dd47ed41-kube-api-access-dks65\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.472428 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30fffd32-d307-47ec-b239-aeb8dd47ed41-logs\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.472498 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data-custom\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.472526 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-scripts\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.472557 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.472577 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30fffd32-d307-47ec-b239-aeb8dd47ed41-etc-machine-id\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.472596 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.574181 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.574226 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30fffd32-d307-47ec-b239-aeb8dd47ed41-etc-machine-id\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.574246 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.574344 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dks65\" (UniqueName: \"kubernetes.io/projected/30fffd32-d307-47ec-b239-aeb8dd47ed41-kube-api-access-dks65\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.574393 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30fffd32-d307-47ec-b239-aeb8dd47ed41-logs\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.574446 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data-custom\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.574478 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-scripts\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.575212 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30fffd32-d307-47ec-b239-aeb8dd47ed41-etc-machine-id\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.575243 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30fffd32-d307-47ec-b239-aeb8dd47ed41-logs\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.577606 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.580211 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-scripts\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.585343 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.585754 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data-custom\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.599379 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dks65\" (UniqueName: \"kubernetes.io/projected/30fffd32-d307-47ec-b239-aeb8dd47ed41-kube-api-access-dks65\") pod \"cinder-api-0\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " pod="openstack/cinder-api-0" Sep 29 19:16:33 crc kubenswrapper[4792]: I0929 19:16:33.770101 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.200274 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-554fb67fd9-fh25j" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.279182 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-76468fd5f8-gfqwb"] Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.279528 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-76468fd5f8-gfqwb" podUID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" containerName="neutron-api" containerID="cri-o://175d124e38feb4de4f16828fc99f2cf7e86deb78620e9e79adee38b902a28506" gracePeriod=30 Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.279988 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-76468fd5f8-gfqwb" podUID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" containerName="neutron-httpd" containerID="cri-o://191e64ed10d4f9889b7edc026e4179cae69c2f165fe2028869de4a6c85e655f8" gracePeriod=30 Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.307156 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.390439 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-nb\") pod \"b7b5b809-50d5-467e-9faf-3d4398f81b15\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.391081 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-svc\") pod \"b7b5b809-50d5-467e-9faf-3d4398f81b15\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.391133 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-sb\") pod \"b7b5b809-50d5-467e-9faf-3d4398f81b15\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.391228 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dfz6\" (UniqueName: \"kubernetes.io/projected/b7b5b809-50d5-467e-9faf-3d4398f81b15-kube-api-access-2dfz6\") pod \"b7b5b809-50d5-467e-9faf-3d4398f81b15\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.391431 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-config\") pod \"b7b5b809-50d5-467e-9faf-3d4398f81b15\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.391473 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-swift-storage-0\") pod \"b7b5b809-50d5-467e-9faf-3d4398f81b15\" (UID: \"b7b5b809-50d5-467e-9faf-3d4398f81b15\") " Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.417453 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/b7b5b809-50d5-467e-9faf-3d4398f81b15-kube-api-access-2dfz6" (OuterVolumeSpecName: "kube-api-access-2dfz6") pod "b7b5b809-50d5-467e-9faf-3d4398f81b15" (UID: "b7b5b809-50d5-467e-9faf-3d4398f81b15"). InnerVolumeSpecName "kube-api-access-2dfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.495188 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dfz6\" (UniqueName: \"kubernetes.io/projected/b7b5b809-50d5-467e-9faf-3d4398f81b15-kube-api-access-2dfz6\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.557586 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-65c5d84686-mddqt" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.605672 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b7b5b809-50d5-467e-9faf-3d4398f81b15" (UID: "b7b5b809-50d5-467e-9faf-3d4398f81b15"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.610363 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b7b5b809-50d5-467e-9faf-3d4398f81b15" (UID: "b7b5b809-50d5-467e-9faf-3d4398f81b15"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.642419 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7d954476d6-ngzb5"] Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.642640 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7d954476d6-ngzb5" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" containerName="barbican-api-log" containerID="cri-o://7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b" gracePeriod=30 Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.643076 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7d954476d6-ngzb5" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" containerName="barbican-api" containerID="cri-o://270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c" gracePeriod=30 Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.646097 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-config" (OuterVolumeSpecName: "config") pod "b7b5b809-50d5-467e-9faf-3d4398f81b15" (UID: "b7b5b809-50d5-467e-9faf-3d4398f81b15"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.683132 4792 generic.go:334] "Generic (PLEG): container finished" podID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" containerID="191e64ed10d4f9889b7edc026e4179cae69c2f165fe2028869de4a6c85e655f8" exitCode=0 Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.683212 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76468fd5f8-gfqwb" event={"ID":"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce","Type":"ContainerDied","Data":"191e64ed10d4f9889b7edc026e4179cae69c2f165fe2028869de4a6c85e655f8"} Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.701307 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b7b5b809-50d5-467e-9faf-3d4398f81b15" (UID: "b7b5b809-50d5-467e-9faf-3d4398f81b15"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.706917 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" event={"ID":"b7b5b809-50d5-467e-9faf-3d4398f81b15","Type":"ContainerDied","Data":"1d4ce2e04b7c26d2f14bfb0a28b44cb95a1d7306c0954008ee37f871280590d3"} Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.706980 4792 scope.go:117] "RemoveContainer" containerID="35b9217f49e1a490d6c94be56e7b3250f2a452ab9417107400a369fa0c6e4228" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.707127 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-6qkxx" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.713669 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.713704 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.713715 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.713728 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.788472 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b7b5b809-50d5-467e-9faf-3d4398f81b15" (UID: "b7b5b809-50d5-467e-9faf-3d4398f81b15"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.830065 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7b5b809-50d5-467e-9faf-3d4398f81b15-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:34 crc kubenswrapper[4792]: I0929 19:16:34.888127 4792 scope.go:117] "RemoveContainer" containerID="7c4b3d78fa3901e2476b72e1db0c88d929066f3ea05ce3ffb80a100285a57150" Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.074621 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-6qkxx"] Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.087570 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-6qkxx"] Sep 29 19:16:35 crc kubenswrapper[4792]: E0929 19:16:35.118510 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.381624 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.442113 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:16:35 crc kubenswrapper[4792]: W0929 19:16:35.443109 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c4f17e5_0900_434c_8110_89e781430ded.slice/crio-56f4f4805475cf40a1b27f4966da60bc9e4a062bacf15fc1544925d2d3324be2 WatchSource:0}: Error finding container 56f4f4805475cf40a1b27f4966da60bc9e4a062bacf15fc1544925d2d3324be2: Status 404 returned error can't find the container with id 56f4f4805475cf40a1b27f4966da60bc9e4a062bacf15fc1544925d2d3324be2 Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.473626 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.492572 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-jn4q7"] Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.734927 4792 generic.go:334] "Generic (PLEG): container finished" podID="7602636e-8093-4eba-90f6-d5fbe8273356" containerID="7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b" exitCode=143 Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.735291 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d954476d6-ngzb5" event={"ID":"7602636e-8093-4eba-90f6-d5fbe8273356","Type":"ContainerDied","Data":"7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b"} Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.736503 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"30fffd32-d307-47ec-b239-aeb8dd47ed41","Type":"ContainerStarted","Data":"73f847753de886d2aab1d5de4349534c5f118dd3ba66a169ddef897e851624a2"} Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.745284 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"06538688-0bb7-45ae-a249-94ba5c312b2b","Type":"ContainerStarted","Data":"293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8"} Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 
Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.746090 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.747313 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="proxy-httpd" containerID="cri-o://293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8" gracePeriod=30
Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.747394 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="sg-core" containerID="cri-o://cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1" gracePeriod=30
Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.761518 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" event={"ID":"04e38954-ec57-4d8c-811b-e05b3ca8ba9d","Type":"ContainerStarted","Data":"3bc0a35efd9b16ac19255dae09f8e4d2e0736ded92f6ecb1dcedbdb987c6dcf3"}
Sep 29 19:16:35 crc kubenswrapper[4792]: I0929 19:16:35.769260 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4c4f17e5-0900-434c-8110-89e781430ded","Type":"ContainerStarted","Data":"56f4f4805475cf40a1b27f4966da60bc9e4a062bacf15fc1544925d2d3324be2"}
Sep 29 19:16:36 crc kubenswrapper[4792]: I0929 19:16:36.799063 4792 generic.go:334] "Generic (PLEG): container finished" podID="04e38954-ec57-4d8c-811b-e05b3ca8ba9d" containerID="914d73e3665ff5c2b32960268da1543b1d450a8d6ebb7de8ce0ca8b48bf4c1d6" exitCode=0
Sep 29 19:16:36 crc kubenswrapper[4792]: I0929 19:16:36.799223 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" event={"ID":"04e38954-ec57-4d8c-811b-e05b3ca8ba9d","Type":"ContainerDied","Data":"914d73e3665ff5c2b32960268da1543b1d450a8d6ebb7de8ce0ca8b48bf4c1d6"}
Sep 29 19:16:36 crc kubenswrapper[4792]: I0929 19:16:36.817037 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"30fffd32-d307-47ec-b239-aeb8dd47ed41","Type":"ContainerStarted","Data":"fba4bc7f1bf62433ff5b0ded0f141d63cbde46b9dafb84191bda76ec8c7a68e2"}
Sep 29 19:16:36 crc kubenswrapper[4792]: I0929 19:16:36.852082 4792 generic.go:334] "Generic (PLEG): container finished" podID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerID="293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8" exitCode=0
Sep 29 19:16:36 crc kubenswrapper[4792]: I0929 19:16:36.852396 4792 generic.go:334] "Generic (PLEG): container finished" podID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerID="cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1" exitCode=2
Sep 29 19:16:36 crc kubenswrapper[4792]: I0929 19:16:36.852419 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"06538688-0bb7-45ae-a249-94ba5c312b2b","Type":"ContainerDied","Data":"293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8"}
Sep 29 19:16:36 crc kubenswrapper[4792]: I0929 19:16:36.852445 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"06538688-0bb7-45ae-a249-94ba5c312b2b","Type":"ContainerDied","Data":"cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1"}
event for pod" pod="openstack/ceilometer-0" event={"ID":"06538688-0bb7-45ae-a249-94ba5c312b2b","Type":"ContainerDied","Data":"cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1"} Sep 29 19:16:37 crc kubenswrapper[4792]: I0929 19:16:37.097091 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7b5b809-50d5-467e-9faf-3d4398f81b15" path="/var/lib/kubelet/pods/b7b5b809-50d5-467e-9faf-3d4398f81b15/volumes" Sep 29 19:16:37 crc kubenswrapper[4792]: I0929 19:16:37.867827 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"30fffd32-d307-47ec-b239-aeb8dd47ed41","Type":"ContainerStarted","Data":"a84173731e815c6cf253d60aba27125d90c40582e9c85ae3097bb2976d5772c9"} Sep 29 19:16:37 crc kubenswrapper[4792]: I0929 19:16:37.869283 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 19:16:37 crc kubenswrapper[4792]: I0929 19:16:37.867969 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="30fffd32-d307-47ec-b239-aeb8dd47ed41" containerName="cinder-api-log" containerID="cri-o://fba4bc7f1bf62433ff5b0ded0f141d63cbde46b9dafb84191bda76ec8c7a68e2" gracePeriod=30 Sep 29 19:16:37 crc kubenswrapper[4792]: I0929 19:16:37.868012 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="30fffd32-d307-47ec-b239-aeb8dd47ed41" containerName="cinder-api" containerID="cri-o://a84173731e815c6cf253d60aba27125d90c40582e9c85ae3097bb2976d5772c9" gracePeriod=30 Sep 29 19:16:37 crc kubenswrapper[4792]: I0929 19:16:37.873091 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" event={"ID":"04e38954-ec57-4d8c-811b-e05b3ca8ba9d","Type":"ContainerStarted","Data":"e6d70dacef0aaa7cbff2e1e279bdbb0094e89f8a294673540279a4d0279e6d89"} Sep 29 19:16:37 crc kubenswrapper[4792]: I0929 19:16:37.873989 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:37 crc kubenswrapper[4792]: I0929 19:16:37.883992 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4c4f17e5-0900-434c-8110-89e781430ded","Type":"ContainerStarted","Data":"5edf486e84c549c28ab0d97dff78483656c138efe39e20230a7b1676d5128732"} Sep 29 19:16:37 crc kubenswrapper[4792]: I0929 19:16:37.892445 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.892424214 podStartE2EDuration="4.892424214s" podCreationTimestamp="2025-09-29 19:16:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:37.884538154 +0000 UTC m=+1209.877845560" watchObservedRunningTime="2025-09-29 19:16:37.892424214 +0000 UTC m=+1209.885731610" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.287586 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7d954476d6-ngzb5" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": read tcp 10.217.0.2:58574->10.217.0.163:9311: read: connection reset by peer" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.287646 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7d954476d6-ngzb5" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" 
containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": read tcp 10.217.0.2:58568->10.217.0.163:9311: read: connection reset by peer" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.766381 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.793404 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" podStartSLOduration=5.793373642 podStartE2EDuration="5.793373642s" podCreationTimestamp="2025-09-29 19:16:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:37.925602759 +0000 UTC m=+1209.918910155" watchObservedRunningTime="2025-09-29 19:16:38.793373642 +0000 UTC m=+1210.786681038" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.825055 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data-custom\") pod \"7602636e-8093-4eba-90f6-d5fbe8273356\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.825135 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7602636e-8093-4eba-90f6-d5fbe8273356-logs\") pod \"7602636e-8093-4eba-90f6-d5fbe8273356\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.825224 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jtn9\" (UniqueName: \"kubernetes.io/projected/7602636e-8093-4eba-90f6-d5fbe8273356-kube-api-access-8jtn9\") pod \"7602636e-8093-4eba-90f6-d5fbe8273356\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.825270 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data\") pod \"7602636e-8093-4eba-90f6-d5fbe8273356\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.825295 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-combined-ca-bundle\") pod \"7602636e-8093-4eba-90f6-d5fbe8273356\" (UID: \"7602636e-8093-4eba-90f6-d5fbe8273356\") " Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.826107 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7602636e-8093-4eba-90f6-d5fbe8273356-logs" (OuterVolumeSpecName: "logs") pod "7602636e-8093-4eba-90f6-d5fbe8273356" (UID: "7602636e-8093-4eba-90f6-d5fbe8273356"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.834205 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7602636e-8093-4eba-90f6-d5fbe8273356" (UID: "7602636e-8093-4eba-90f6-d5fbe8273356"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.851782 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7602636e-8093-4eba-90f6-d5fbe8273356-kube-api-access-8jtn9" (OuterVolumeSpecName: "kube-api-access-8jtn9") pod "7602636e-8093-4eba-90f6-d5fbe8273356" (UID: "7602636e-8093-4eba-90f6-d5fbe8273356"). InnerVolumeSpecName "kube-api-access-8jtn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.898075 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7602636e-8093-4eba-90f6-d5fbe8273356" (UID: "7602636e-8093-4eba-90f6-d5fbe8273356"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.917715 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4c4f17e5-0900-434c-8110-89e781430ded","Type":"ContainerStarted","Data":"9345e87af987bef25fcb78d3b6c05beb9ab6376336c88fe1036138009a355629"} Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.924973 4792 generic.go:334] "Generic (PLEG): container finished" podID="7602636e-8093-4eba-90f6-d5fbe8273356" containerID="270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c" exitCode=0 Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.925051 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d954476d6-ngzb5" event={"ID":"7602636e-8093-4eba-90f6-d5fbe8273356","Type":"ContainerDied","Data":"270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c"} Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.925078 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d954476d6-ngzb5" event={"ID":"7602636e-8093-4eba-90f6-d5fbe8273356","Type":"ContainerDied","Data":"1b1149a36c8fd58117aa4fc036a15d384bb0b1b06a5eb4e83ef3cada9cc2481f"} Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.925094 4792 scope.go:117] "RemoveContainer" containerID="270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.925227 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7d954476d6-ngzb5" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.927106 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jtn9\" (UniqueName: \"kubernetes.io/projected/7602636e-8093-4eba-90f6-d5fbe8273356-kube-api-access-8jtn9\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.927441 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.927518 4792 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.927588 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7602636e-8093-4eba-90f6-d5fbe8273356-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.936945 4792 generic.go:334] "Generic (PLEG): container finished" podID="30fffd32-d307-47ec-b239-aeb8dd47ed41" containerID="fba4bc7f1bf62433ff5b0ded0f141d63cbde46b9dafb84191bda76ec8c7a68e2" exitCode=143 Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.937723 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"30fffd32-d307-47ec-b239-aeb8dd47ed41","Type":"ContainerDied","Data":"fba4bc7f1bf62433ff5b0ded0f141d63cbde46b9dafb84191bda76ec8c7a68e2"} Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.982166 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data" (OuterVolumeSpecName: "config-data") pod "7602636e-8093-4eba-90f6-d5fbe8273356" (UID: "7602636e-8093-4eba-90f6-d5fbe8273356"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:38 crc kubenswrapper[4792]: I0929 19:16:38.994865 4792 scope.go:117] "RemoveContainer" containerID="7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b" Sep 29 19:16:39 crc kubenswrapper[4792]: I0929 19:16:39.026152 4792 scope.go:117] "RemoveContainer" containerID="270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c" Sep 29 19:16:39 crc kubenswrapper[4792]: I0929 19:16:39.038706 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7602636e-8093-4eba-90f6-d5fbe8273356-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:39 crc kubenswrapper[4792]: E0929 19:16:39.039807 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c\": container with ID starting with 270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c not found: ID does not exist" containerID="270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c" Sep 29 19:16:39 crc kubenswrapper[4792]: I0929 19:16:39.039864 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c"} err="failed to get container status \"270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c\": rpc error: code = NotFound desc = could not find container \"270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c\": container with ID starting with 270c89a24d61ed075e395dc11a1acae0e1d4be01ddec7366bb98c2b29771ed0c not found: ID does not exist" Sep 29 19:16:39 crc kubenswrapper[4792]: I0929 19:16:39.039890 4792 scope.go:117] "RemoveContainer" containerID="7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b" Sep 29 19:16:39 crc kubenswrapper[4792]: E0929 19:16:39.049026 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b\": container with ID starting with 7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b not found: ID does not exist" containerID="7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b" Sep 29 19:16:39 crc kubenswrapper[4792]: I0929 19:16:39.049076 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b"} err="failed to get container status \"7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b\": rpc error: code = NotFound desc = could not find container \"7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b\": container with ID starting with 7cae590a7d16113a2c43e151b966b2caa78056c3369b765210716d97c306136b not found: ID does not exist" Sep 29 19:16:39 crc kubenswrapper[4792]: I0929 19:16:39.239050 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=6.264164748 podStartE2EDuration="7.239019367s" podCreationTimestamp="2025-09-29 19:16:32 +0000 UTC" firstStartedPulling="2025-09-29 19:16:35.449228236 +0000 UTC m=+1207.442535622" lastFinishedPulling="2025-09-29 19:16:36.424082855 +0000 UTC m=+1208.417390241" observedRunningTime="2025-09-29 19:16:38.959151653 +0000 UTC m=+1210.952459069" watchObservedRunningTime="2025-09-29 19:16:39.239019367 
+0000 UTC m=+1211.232326763" Sep 29 19:16:39 crc kubenswrapper[4792]: I0929 19:16:39.247347 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7d954476d6-ngzb5"] Sep 29 19:16:39 crc kubenswrapper[4792]: I0929 19:16:39.255434 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7d954476d6-ngzb5"] Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.353716 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.463032 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7bw8\" (UniqueName: \"kubernetes.io/projected/06538688-0bb7-45ae-a249-94ba5c312b2b-kube-api-access-n7bw8\") pod \"06538688-0bb7-45ae-a249-94ba5c312b2b\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.463275 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-config-data\") pod \"06538688-0bb7-45ae-a249-94ba5c312b2b\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.463392 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-combined-ca-bundle\") pod \"06538688-0bb7-45ae-a249-94ba5c312b2b\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.463521 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-run-httpd\") pod \"06538688-0bb7-45ae-a249-94ba5c312b2b\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.463685 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-log-httpd\") pod \"06538688-0bb7-45ae-a249-94ba5c312b2b\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.463823 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-scripts\") pod \"06538688-0bb7-45ae-a249-94ba5c312b2b\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.463981 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-sg-core-conf-yaml\") pod \"06538688-0bb7-45ae-a249-94ba5c312b2b\" (UID: \"06538688-0bb7-45ae-a249-94ba5c312b2b\") " Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.464102 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "06538688-0bb7-45ae-a249-94ba5c312b2b" (UID: "06538688-0bb7-45ae-a249-94ba5c312b2b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.464326 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "06538688-0bb7-45ae-a249-94ba5c312b2b" (UID: "06538688-0bb7-45ae-a249-94ba5c312b2b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.464689 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.464775 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06538688-0bb7-45ae-a249-94ba5c312b2b-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.473968 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-scripts" (OuterVolumeSpecName: "scripts") pod "06538688-0bb7-45ae-a249-94ba5c312b2b" (UID: "06538688-0bb7-45ae-a249-94ba5c312b2b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.500471 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06538688-0bb7-45ae-a249-94ba5c312b2b-kube-api-access-n7bw8" (OuterVolumeSpecName: "kube-api-access-n7bw8") pod "06538688-0bb7-45ae-a249-94ba5c312b2b" (UID: "06538688-0bb7-45ae-a249-94ba5c312b2b"). InnerVolumeSpecName "kube-api-access-n7bw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.522897 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "06538688-0bb7-45ae-a249-94ba5c312b2b" (UID: "06538688-0bb7-45ae-a249-94ba5c312b2b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.535041 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06538688-0bb7-45ae-a249-94ba5c312b2b" (UID: "06538688-0bb7-45ae-a249-94ba5c312b2b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.542613 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-config-data" (OuterVolumeSpecName: "config-data") pod "06538688-0bb7-45ae-a249-94ba5c312b2b" (UID: "06538688-0bb7-45ae-a249-94ba5c312b2b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.566100 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.566137 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.566149 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7bw8\" (UniqueName: \"kubernetes.io/projected/06538688-0bb7-45ae-a249-94ba5c312b2b-kube-api-access-n7bw8\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.566159 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.566168 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06538688-0bb7-45ae-a249-94ba5c312b2b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.955306 4792 generic.go:334] "Generic (PLEG): container finished" podID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerID="d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd" exitCode=0 Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.955345 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"06538688-0bb7-45ae-a249-94ba5c312b2b","Type":"ContainerDied","Data":"d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd"} Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.955369 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.955385 4792 scope.go:117] "RemoveContainer" containerID="293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8" Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.955373 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"06538688-0bb7-45ae-a249-94ba5c312b2b","Type":"ContainerDied","Data":"47848e75a05b2a9e7c5bd2dda787011c4dd5a3822b0a55c3bf757e889d4ad62d"} Sep 29 19:16:40 crc kubenswrapper[4792]: I0929 19:16:40.979907 4792 scope.go:117] "RemoveContainer" containerID="cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.021075 4792 scope.go:117] "RemoveContainer" containerID="d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.050111 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" path="/var/lib/kubelet/pods/7602636e-8093-4eba-90f6-d5fbe8273356/volumes" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.097022 4792 scope.go:117] "RemoveContainer" containerID="293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.097124 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:16:41 crc kubenswrapper[4792]: E0929 19:16:41.101284 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8\": container with ID starting with 293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8 not found: ID does not exist" containerID="293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.101318 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8"} err="failed to get container status \"293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8\": rpc error: code = NotFound desc = could not find container \"293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8\": container with ID starting with 293c8989ceb3017c6a21a35f2c87bd0ba72b6587f9e66bc991c493362b121de8 not found: ID does not exist" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.101342 4792 scope.go:117] "RemoveContainer" containerID="cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1" Sep 29 19:16:41 crc kubenswrapper[4792]: E0929 19:16:41.102054 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1\": container with ID starting with cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1 not found: ID does not exist" containerID="cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.102071 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1"} err="failed to get container status \"cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1\": rpc error: code = NotFound desc = could not find 
container \"cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1\": container with ID starting with cca98396e3e20b7a3deaee371a5be5bea401ca418acda758f0e1c53f67c91cd1 not found: ID does not exist" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.102089 4792 scope.go:117] "RemoveContainer" containerID="d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd" Sep 29 19:16:41 crc kubenswrapper[4792]: E0929 19:16:41.103674 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd\": container with ID starting with d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd not found: ID does not exist" containerID="d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.103703 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd"} err="failed to get container status \"d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd\": rpc error: code = NotFound desc = could not find container \"d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd\": container with ID starting with d4b42977ca66bff717dab89a0ce6b7cd93d441d8d89cea411770b90a82e93fcd not found: ID does not exist" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.109223 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.118219 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:16:41 crc kubenswrapper[4792]: E0929 19:16:41.118577 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7b5b809-50d5-467e-9faf-3d4398f81b15" containerName="init" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.118593 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7b5b809-50d5-467e-9faf-3d4398f81b15" containerName="init" Sep 29 19:16:41 crc kubenswrapper[4792]: E0929 19:16:41.118602 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="proxy-httpd" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.118608 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="proxy-httpd" Sep 29 19:16:41 crc kubenswrapper[4792]: E0929 19:16:41.118619 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="ceilometer-notification-agent" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.118625 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="ceilometer-notification-agent" Sep 29 19:16:41 crc kubenswrapper[4792]: E0929 19:16:41.118633 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" containerName="barbican-api" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.118638 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" containerName="barbican-api" Sep 29 19:16:41 crc kubenswrapper[4792]: E0929 19:16:41.118647 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" containerName="barbican-api-log" Sep 29 19:16:41 crc 
kubenswrapper[4792]: I0929 19:16:41.118653 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" containerName="barbican-api-log" Sep 29 19:16:41 crc kubenswrapper[4792]: E0929 19:16:41.118660 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="sg-core" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.118666 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="sg-core" Sep 29 19:16:41 crc kubenswrapper[4792]: E0929 19:16:41.118679 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7b5b809-50d5-467e-9faf-3d4398f81b15" containerName="dnsmasq-dns" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.118685 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7b5b809-50d5-467e-9faf-3d4398f81b15" containerName="dnsmasq-dns" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.119186 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="sg-core" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.119202 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7b5b809-50d5-467e-9faf-3d4398f81b15" containerName="dnsmasq-dns" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.119213 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="ceilometer-notification-agent" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.119226 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" containerName="barbican-api-log" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.119234 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="7602636e-8093-4eba-90f6-d5fbe8273356" containerName="barbican-api" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.119251 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" containerName="proxy-httpd" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.122781 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.132215 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.132412 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.159537 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.186913 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.186977 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-log-httpd\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.187023 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.187066 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-scripts\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.187095 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-run-httpd\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.187145 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqklb\" (UniqueName: \"kubernetes.io/projected/a38f93d7-b748-4e1f-beff-e89168177090-kube-api-access-lqklb\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.187164 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-config-data\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.290675 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-log-httpd\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.291007 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.291051 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-scripts\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.291086 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-run-httpd\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.291147 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqklb\" (UniqueName: \"kubernetes.io/projected/a38f93d7-b748-4e1f-beff-e89168177090-kube-api-access-lqklb\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.291171 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-config-data\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.291211 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.296151 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-run-httpd\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.301059 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-log-httpd\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.301343 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.301595 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-scripts\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.318646 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.335797 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-config-data\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.339658 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqklb\" (UniqueName: \"kubernetes.io/projected/a38f93d7-b748-4e1f-beff-e89168177090-kube-api-access-lqklb\") pod \"ceilometer-0\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") " pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.461668 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.959699 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:16:41 crc kubenswrapper[4792]: I0929 19:16:41.960015 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:16:42 crc kubenswrapper[4792]: I0929 19:16:42.019157 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:16:42 crc kubenswrapper[4792]: W0929 19:16:42.026822 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda38f93d7_b748_4e1f_beff_e89168177090.slice/crio-9c1a6d715a4d43a0a6618490d411f20abee29ba7d196166dbc4c8e4998a7039a WatchSource:0}: Error finding container 9c1a6d715a4d43a0a6618490d411f20abee29ba7d196166dbc4c8e4998a7039a: Status 404 returned error can't find the container with id 9c1a6d715a4d43a0a6618490d411f20abee29ba7d196166dbc4c8e4998a7039a Sep 29 19:16:42 crc kubenswrapper[4792]: I0929 19:16:42.963685 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-dfd9c6b56-wq84c" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Sep 29 19:16:42 crc kubenswrapper[4792]: I0929 19:16:42.963761 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:16:42 crc kubenswrapper[4792]: I0929 19:16:42.964492 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"30df7ac6c56065d05590b40fa8b60c11ef56e2f8dbc338a1dc4730d3f00fb6e0"} pod="openstack/horizon-dfd9c6b56-wq84c" containerMessage="Container horizon failed startup probe, will be restarted" Sep 29 19:16:42 crc kubenswrapper[4792]: I0929 19:16:42.964520 4792 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-dfd9c6b56-wq84c" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon" containerID="cri-o://30df7ac6c56065d05590b40fa8b60c11ef56e2f8dbc338a1dc4730d3f00fb6e0" gracePeriod=30 Sep 29 19:16:42 crc kubenswrapper[4792]: I0929 19:16:42.992123 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38f93d7-b748-4e1f-beff-e89168177090","Type":"ContainerStarted","Data":"c2333651cec5d27d20051349a4ed6406532cbb246ab58bf74f004cf9da156d8a"} Sep 29 19:16:42 crc kubenswrapper[4792]: I0929 19:16:42.992177 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38f93d7-b748-4e1f-beff-e89168177090","Type":"ContainerStarted","Data":"9c1a6d715a4d43a0a6618490d411f20abee29ba7d196166dbc4c8e4998a7039a"} Sep 29 19:16:43 crc kubenswrapper[4792]: I0929 19:16:43.024380 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06538688-0bb7-45ae-a249-94ba5c312b2b" path="/var/lib/kubelet/pods/06538688-0bb7-45ae-a249-94ba5c312b2b/volumes" Sep 29 19:16:43 crc kubenswrapper[4792]: I0929 19:16:43.078508 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8494dffd6-7rx5p" podUID="23845288-b122-49f0-b10d-641cfb94b66f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Sep 29 19:16:43 crc kubenswrapper[4792]: I0929 19:16:43.078580 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:16:43 crc kubenswrapper[4792]: I0929 19:16:43.079287 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"e157e75d292130bd4389d439006d4ea52a41ceebecf0771101d06500e2e20e69"} pod="openstack/horizon-8494dffd6-7rx5p" containerMessage="Container horizon failed startup probe, will be restarted" Sep 29 19:16:43 crc kubenswrapper[4792]: I0929 19:16:43.079339 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8494dffd6-7rx5p" podUID="23845288-b122-49f0-b10d-641cfb94b66f" containerName="horizon" containerID="cri-o://e157e75d292130bd4389d439006d4ea52a41ceebecf0771101d06500e2e20e69" gracePeriod=30 Sep 29 19:16:43 crc kubenswrapper[4792]: I0929 19:16:43.339602 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 19:16:43 crc kubenswrapper[4792]: I0929 19:16:43.450885 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:16:43 crc kubenswrapper[4792]: I0929 19:16:43.522429 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-kqs7x"] Sep 29 19:16:43 crc kubenswrapper[4792]: I0929 19:16:43.523997 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" podUID="2465ec19-7e92-4be1-9a98-3d22a1553089" containerName="dnsmasq-dns" containerID="cri-o://0d01e38b3049ecd69e2b514251034484d0b6f9c288a416a91d536117758d34fa" gracePeriod=10 Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.001605 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"a38f93d7-b748-4e1f-beff-e89168177090","Type":"ContainerStarted","Data":"b7dccfec0a9e8f86078b8489c984facdbf211ea443e4cff300288d3c30781ba2"} Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.004177 4792 generic.go:334] "Generic (PLEG): container finished" podID="2465ec19-7e92-4be1-9a98-3d22a1553089" containerID="0d01e38b3049ecd69e2b514251034484d0b6f9c288a416a91d536117758d34fa" exitCode=0 Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.004223 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" event={"ID":"2465ec19-7e92-4be1-9a98-3d22a1553089","Type":"ContainerDied","Data":"0d01e38b3049ecd69e2b514251034484d0b6f9c288a416a91d536117758d34fa"} Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.308623 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.374672 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.448212 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.551426 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-svc\") pod \"2465ec19-7e92-4be1-9a98-3d22a1553089\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.551531 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-nb\") pod \"2465ec19-7e92-4be1-9a98-3d22a1553089\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.551584 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-swift-storage-0\") pod \"2465ec19-7e92-4be1-9a98-3d22a1553089\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.551690 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-config\") pod \"2465ec19-7e92-4be1-9a98-3d22a1553089\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.551763 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-sb\") pod \"2465ec19-7e92-4be1-9a98-3d22a1553089\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.551832 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9w7jn\" (UniqueName: \"kubernetes.io/projected/2465ec19-7e92-4be1-9a98-3d22a1553089-kube-api-access-9w7jn\") pod \"2465ec19-7e92-4be1-9a98-3d22a1553089\" (UID: \"2465ec19-7e92-4be1-9a98-3d22a1553089\") " Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.588985 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/2465ec19-7e92-4be1-9a98-3d22a1553089-kube-api-access-9w7jn" (OuterVolumeSpecName: "kube-api-access-9w7jn") pod "2465ec19-7e92-4be1-9a98-3d22a1553089" (UID: "2465ec19-7e92-4be1-9a98-3d22a1553089"). InnerVolumeSpecName "kube-api-access-9w7jn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.627368 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2465ec19-7e92-4be1-9a98-3d22a1553089" (UID: "2465ec19-7e92-4be1-9a98-3d22a1553089"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.640301 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2465ec19-7e92-4be1-9a98-3d22a1553089" (UID: "2465ec19-7e92-4be1-9a98-3d22a1553089"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.659778 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.659829 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9w7jn\" (UniqueName: \"kubernetes.io/projected/2465ec19-7e92-4be1-9a98-3d22a1553089-kube-api-access-9w7jn\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.659889 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.690647 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2465ec19-7e92-4be1-9a98-3d22a1553089" (UID: "2465ec19-7e92-4be1-9a98-3d22a1553089"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.702878 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2465ec19-7e92-4be1-9a98-3d22a1553089" (UID: "2465ec19-7e92-4be1-9a98-3d22a1553089"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.719285 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-config" (OuterVolumeSpecName: "config") pod "2465ec19-7e92-4be1-9a98-3d22a1553089" (UID: "2465ec19-7e92-4be1-9a98-3d22a1553089"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.761737 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.761777 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:44 crc kubenswrapper[4792]: I0929 19:16:44.761791 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2465ec19-7e92-4be1-9a98-3d22a1553089-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:45 crc kubenswrapper[4792]: I0929 19:16:45.013093 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38f93d7-b748-4e1f-beff-e89168177090","Type":"ContainerStarted","Data":"898d55096687f9fd3bcc15e938082b53cc537880479c2ec5b2c965b0327fae5f"} Sep 29 19:16:45 crc kubenswrapper[4792]: I0929 19:16:45.015570 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" Sep 29 19:16:45 crc kubenswrapper[4792]: I0929 19:16:45.015651 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="4c4f17e5-0900-434c-8110-89e781430ded" containerName="cinder-scheduler" containerID="cri-o://5edf486e84c549c28ab0d97dff78483656c138efe39e20230a7b1676d5128732" gracePeriod=30 Sep 29 19:16:45 crc kubenswrapper[4792]: I0929 19:16:45.015734 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="4c4f17e5-0900-434c-8110-89e781430ded" containerName="probe" containerID="cri-o://9345e87af987bef25fcb78d3b6c05beb9ab6376336c88fe1036138009a355629" gracePeriod=30 Sep 29 19:16:45 crc kubenswrapper[4792]: I0929 19:16:45.041739 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-kqs7x" event={"ID":"2465ec19-7e92-4be1-9a98-3d22a1553089","Type":"ContainerDied","Data":"0881e694285eaf97fcb6b371d228d5ef5b5aa7fdfb004571a453cdc2fd63d593"} Sep 29 19:16:45 crc kubenswrapper[4792]: I0929 19:16:45.041788 4792 scope.go:117] "RemoveContainer" containerID="0d01e38b3049ecd69e2b514251034484d0b6f9c288a416a91d536117758d34fa" Sep 29 19:16:45 crc kubenswrapper[4792]: I0929 19:16:45.065899 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-kqs7x"] Sep 29 19:16:45 crc kubenswrapper[4792]: I0929 19:16:45.078034 4792 scope.go:117] "RemoveContainer" containerID="76b06e995a838dbc8a199d889e6a5c6c0114e28fed3eb5c8041aa9c81caac7b1" Sep 29 19:16:45 crc kubenswrapper[4792]: I0929 19:16:45.080492 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-kqs7x"] Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.026764 4792 generic.go:334] "Generic (PLEG): container finished" podID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" containerID="175d124e38feb4de4f16828fc99f2cf7e86deb78620e9e79adee38b902a28506" exitCode=0 Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.026858 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76468fd5f8-gfqwb" 
event={"ID":"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce","Type":"ContainerDied","Data":"175d124e38feb4de4f16828fc99f2cf7e86deb78620e9e79adee38b902a28506"} Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.031054 4792 generic.go:334] "Generic (PLEG): container finished" podID="4c4f17e5-0900-434c-8110-89e781430ded" containerID="9345e87af987bef25fcb78d3b6c05beb9ab6376336c88fe1036138009a355629" exitCode=0 Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.031099 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4c4f17e5-0900-434c-8110-89e781430ded","Type":"ContainerDied","Data":"9345e87af987bef25fcb78d3b6c05beb9ab6376336c88fe1036138009a355629"} Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.472339 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.592566 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-httpd-config\") pod \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.592654 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-combined-ca-bundle\") pod \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.592679 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-config\") pod \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.592789 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-ovndb-tls-certs\") pod \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.592891 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gpzq\" (UniqueName: \"kubernetes.io/projected/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-kube-api-access-2gpzq\") pod \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\" (UID: \"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce\") " Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.610753 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" (UID: "e348b56e-dfaa-42ff-b8d4-9ce6d2900cce"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.612167 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-kube-api-access-2gpzq" (OuterVolumeSpecName: "kube-api-access-2gpzq") pod "e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" (UID: "e348b56e-dfaa-42ff-b8d4-9ce6d2900cce"). InnerVolumeSpecName "kube-api-access-2gpzq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.680806 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-config" (OuterVolumeSpecName: "config") pod "e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" (UID: "e348b56e-dfaa-42ff-b8d4-9ce6d2900cce"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.692157 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" (UID: "e348b56e-dfaa-42ff-b8d4-9ce6d2900cce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.701715 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.701748 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.701763 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gpzq\" (UniqueName: \"kubernetes.io/projected/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-kube-api-access-2gpzq\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.701774 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.713494 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" (UID: "e348b56e-dfaa-42ff-b8d4-9ce6d2900cce"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:46 crc kubenswrapper[4792]: I0929 19:16:46.803331 4792 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.029115 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2465ec19-7e92-4be1-9a98-3d22a1553089" path="/var/lib/kubelet/pods/2465ec19-7e92-4be1-9a98-3d22a1553089/volumes" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.063941 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76468fd5f8-gfqwb" event={"ID":"e348b56e-dfaa-42ff-b8d4-9ce6d2900cce","Type":"ContainerDied","Data":"d03ef07057c448c6e75d2a16cb40d7a5a1473151775904da4ef7a87d77fe1e56"} Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.064009 4792 scope.go:117] "RemoveContainer" containerID="191e64ed10d4f9889b7edc026e4179cae69c2f165fe2028869de4a6c85e655f8" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.064885 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-76468fd5f8-gfqwb" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.078878 4792 generic.go:334] "Generic (PLEG): container finished" podID="4c4f17e5-0900-434c-8110-89e781430ded" containerID="5edf486e84c549c28ab0d97dff78483656c138efe39e20230a7b1676d5128732" exitCode=0 Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.079975 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4c4f17e5-0900-434c-8110-89e781430ded","Type":"ContainerDied","Data":"5edf486e84c549c28ab0d97dff78483656c138efe39e20230a7b1676d5128732"} Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.115941 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-76468fd5f8-gfqwb"] Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.136135 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-76468fd5f8-gfqwb"] Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.144243 4792 scope.go:117] "RemoveContainer" containerID="175d124e38feb4de4f16828fc99f2cf7e86deb78620e9e79adee38b902a28506" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.193408 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.310353 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data\") pod \"4c4f17e5-0900-434c-8110-89e781430ded\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.310453 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-combined-ca-bundle\") pod \"4c4f17e5-0900-434c-8110-89e781430ded\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.310515 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4c4f17e5-0900-434c-8110-89e781430ded-etc-machine-id\") pod \"4c4f17e5-0900-434c-8110-89e781430ded\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.310533 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7fwg\" (UniqueName: \"kubernetes.io/projected/4c4f17e5-0900-434c-8110-89e781430ded-kube-api-access-h7fwg\") pod \"4c4f17e5-0900-434c-8110-89e781430ded\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.310555 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data-custom\") pod \"4c4f17e5-0900-434c-8110-89e781430ded\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.310584 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-scripts\") pod \"4c4f17e5-0900-434c-8110-89e781430ded\" (UID: \"4c4f17e5-0900-434c-8110-89e781430ded\") " Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.311014 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/host-path/4c4f17e5-0900-434c-8110-89e781430ded-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4c4f17e5-0900-434c-8110-89e781430ded" (UID: "4c4f17e5-0900-434c-8110-89e781430ded"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.322145 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c4f17e5-0900-434c-8110-89e781430ded-kube-api-access-h7fwg" (OuterVolumeSpecName: "kube-api-access-h7fwg") pod "4c4f17e5-0900-434c-8110-89e781430ded" (UID: "4c4f17e5-0900-434c-8110-89e781430ded"). InnerVolumeSpecName "kube-api-access-h7fwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.322247 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-scripts" (OuterVolumeSpecName: "scripts") pod "4c4f17e5-0900-434c-8110-89e781430ded" (UID: "4c4f17e5-0900-434c-8110-89e781430ded"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.326396 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4c4f17e5-0900-434c-8110-89e781430ded" (UID: "4c4f17e5-0900-434c-8110-89e781430ded"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.393642 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c4f17e5-0900-434c-8110-89e781430ded" (UID: "4c4f17e5-0900-434c-8110-89e781430ded"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.412497 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.412543 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.412554 4792 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4c4f17e5-0900-434c-8110-89e781430ded-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.412563 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7fwg\" (UniqueName: \"kubernetes.io/projected/4c4f17e5-0900-434c-8110-89e781430ded-kube-api-access-h7fwg\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.412573 4792 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.436610 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data" (OuterVolumeSpecName: "config-data") pod "4c4f17e5-0900-434c-8110-89e781430ded" (UID: "4c4f17e5-0900-434c-8110-89e781430ded"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.513838 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c4f17e5-0900-434c-8110-89e781430ded-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.581296 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:47 crc kubenswrapper[4792]: I0929 19:16:47.615694 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-666f667548-cm9kb" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.094672 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4c4f17e5-0900-434c-8110-89e781430ded","Type":"ContainerDied","Data":"56f4f4805475cf40a1b27f4966da60bc9e4a062bacf15fc1544925d2d3324be2"} Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.095033 4792 scope.go:117] "RemoveContainer" containerID="9345e87af987bef25fcb78d3b6c05beb9ab6376336c88fe1036138009a355629" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.094697 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.136916 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.145095 4792 scope.go:117] "RemoveContainer" containerID="5edf486e84c549c28ab0d97dff78483656c138efe39e20230a7b1676d5128732" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.150101 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.173602 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:16:48 crc kubenswrapper[4792]: E0929 19:16:48.174010 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" containerName="neutron-api" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174025 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" containerName="neutron-api" Sep 29 19:16:48 crc kubenswrapper[4792]: E0929 19:16:48.174048 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" containerName="neutron-httpd" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174055 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" containerName="neutron-httpd" Sep 29 19:16:48 crc kubenswrapper[4792]: E0929 19:16:48.174074 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c4f17e5-0900-434c-8110-89e781430ded" containerName="probe" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174080 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c4f17e5-0900-434c-8110-89e781430ded" containerName="probe" Sep 29 19:16:48 crc kubenswrapper[4792]: E0929 19:16:48.174096 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2465ec19-7e92-4be1-9a98-3d22a1553089" containerName="dnsmasq-dns" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174102 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2465ec19-7e92-4be1-9a98-3d22a1553089" containerName="dnsmasq-dns" Sep 29 19:16:48 crc kubenswrapper[4792]: E0929 19:16:48.174117 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2465ec19-7e92-4be1-9a98-3d22a1553089" containerName="init" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174123 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2465ec19-7e92-4be1-9a98-3d22a1553089" containerName="init" Sep 29 19:16:48 crc kubenswrapper[4792]: E0929 19:16:48.174132 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c4f17e5-0900-434c-8110-89e781430ded" containerName="cinder-scheduler" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174138 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c4f17e5-0900-434c-8110-89e781430ded" containerName="cinder-scheduler" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174291 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c4f17e5-0900-434c-8110-89e781430ded" containerName="cinder-scheduler" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174304 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" containerName="neutron-httpd" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174312 4792 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="4c4f17e5-0900-434c-8110-89e781430ded" containerName="probe" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174322 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2465ec19-7e92-4be1-9a98-3d22a1553089" containerName="dnsmasq-dns" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.174335 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" containerName="neutron-api" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.176433 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.182033 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.182568 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.207539 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.226765 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/705349a9-36dc-4230-a7ff-e097fc5b66d7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.226825 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chzhv\" (UniqueName: \"kubernetes.io/projected/705349a9-36dc-4230-a7ff-e097fc5b66d7-kube-api-access-chzhv\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.226864 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.227206 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-scripts\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.227417 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.227506 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-config-data\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.329524 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.329591 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-config-data\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.329736 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/705349a9-36dc-4230-a7ff-e097fc5b66d7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.329781 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chzhv\" (UniqueName: \"kubernetes.io/projected/705349a9-36dc-4230-a7ff-e097fc5b66d7-kube-api-access-chzhv\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.329809 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.329937 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-scripts\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.330535 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/705349a9-36dc-4230-a7ff-e097fc5b66d7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.336757 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-scripts\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.337488 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.343340 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-config-data\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 
19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.351351 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/705349a9-36dc-4230-a7ff-e097fc5b66d7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.356399 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chzhv\" (UniqueName: \"kubernetes.io/projected/705349a9-36dc-4230-a7ff-e097fc5b66d7-kube-api-access-chzhv\") pod \"cinder-scheduler-0\" (UID: \"705349a9-36dc-4230-a7ff-e097fc5b66d7\") " pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.509269 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:16:48 crc kubenswrapper[4792]: I0929 19:16:48.864758 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:16:49 crc kubenswrapper[4792]: I0929 19:16:49.046329 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c4f17e5-0900-434c-8110-89e781430ded" path="/var/lib/kubelet/pods/4c4f17e5-0900-434c-8110-89e781430ded/volumes" Sep 29 19:16:49 crc kubenswrapper[4792]: I0929 19:16:49.048414 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e348b56e-dfaa-42ff-b8d4-9ce6d2900cce" path="/var/lib/kubelet/pods/e348b56e-dfaa-42ff-b8d4-9ce6d2900cce/volumes" Sep 29 19:16:49 crc kubenswrapper[4792]: I0929 19:16:49.158346 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38f93d7-b748-4e1f-beff-e89168177090","Type":"ContainerStarted","Data":"ce3cb60d2edb606aed366ce6ca6ef0a742440836731698d163e6693f5a6c1c06"} Sep 29 19:16:49 crc kubenswrapper[4792]: I0929 19:16:49.159596 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 19:16:49 crc kubenswrapper[4792]: I0929 19:16:49.161302 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"705349a9-36dc-4230-a7ff-e097fc5b66d7","Type":"ContainerStarted","Data":"4feb561360a06fef2bb07ff9557ac08ee7e872294774f765b292a553e039cc6c"} Sep 29 19:16:49 crc kubenswrapper[4792]: I0929 19:16:49.518675 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-754c4b8fcb-w2t8n" Sep 29 19:16:49 crc kubenswrapper[4792]: I0929 19:16:49.547770 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.82790765 podStartE2EDuration="8.547752393s" podCreationTimestamp="2025-09-29 19:16:41 +0000 UTC" firstStartedPulling="2025-09-29 19:16:42.030476383 +0000 UTC m=+1214.023783779" lastFinishedPulling="2025-09-29 19:16:48.750321126 +0000 UTC m=+1220.743628522" observedRunningTime="2025-09-29 19:16:49.190423773 +0000 UTC m=+1221.183731169" watchObservedRunningTime="2025-09-29 19:16:49.547752393 +0000 UTC m=+1221.541059789" Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.175377 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"705349a9-36dc-4230-a7ff-e097fc5b66d7","Type":"ContainerStarted","Data":"241333932a5788d3fe57d6589f8403b477f0a6ed9e7d400422de3f254102202a"} Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.895234 4792 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/openstackclient"] Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.896511 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.912422 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.912663 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-wx2hj" Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.935779 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.950133 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.985002 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36d9080e-6ba5-4a59-ac59-21f8a868df0d-openstack-config-secret\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.985047 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36d9080e-6ba5-4a59-ac59-21f8a868df0d-openstack-config\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.985075 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d9080e-6ba5-4a59-ac59-21f8a868df0d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:50 crc kubenswrapper[4792]: I0929 19:16:50.985188 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpxrr\" (UniqueName: \"kubernetes.io/projected/36d9080e-6ba5-4a59-ac59-21f8a868df0d-kube-api-access-qpxrr\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.086872 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36d9080e-6ba5-4a59-ac59-21f8a868df0d-openstack-config-secret\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.086928 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36d9080e-6ba5-4a59-ac59-21f8a868df0d-openstack-config\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.087002 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d9080e-6ba5-4a59-ac59-21f8a868df0d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " 
pod="openstack/openstackclient" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.087381 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpxrr\" (UniqueName: \"kubernetes.io/projected/36d9080e-6ba5-4a59-ac59-21f8a868df0d-kube-api-access-qpxrr\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.088093 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36d9080e-6ba5-4a59-ac59-21f8a868df0d-openstack-config\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.096333 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36d9080e-6ba5-4a59-ac59-21f8a868df0d-openstack-config-secret\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.102831 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d9080e-6ba5-4a59-ac59-21f8a868df0d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.114758 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpxrr\" (UniqueName: \"kubernetes.io/projected/36d9080e-6ba5-4a59-ac59-21f8a868df0d-kube-api-access-qpxrr\") pod \"openstackclient\" (UID: \"36d9080e-6ba5-4a59-ac59-21f8a868df0d\") " pod="openstack/openstackclient" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.199193 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"705349a9-36dc-4230-a7ff-e097fc5b66d7","Type":"ContainerStarted","Data":"35af277664f2f127ab5ef8cc00ca2e0c02b75e932e86409b6e5df43b2ad222e3"} Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.229716 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.862127 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.862111976 podStartE2EDuration="3.862111976s" podCreationTimestamp="2025-09-29 19:16:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:51.221953113 +0000 UTC m=+1223.215260519" watchObservedRunningTime="2025-09-29 19:16:51.862111976 +0000 UTC m=+1223.855419372" Sep 29 19:16:51 crc kubenswrapper[4792]: I0929 19:16:51.868698 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 19:16:51 crc kubenswrapper[4792]: W0929 19:16:51.883267 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36d9080e_6ba5_4a59_ac59_21f8a868df0d.slice/crio-313f715d68dc96ca5b5ec04c140e1a142f6fe8bc21264b06f3482f1a3b96ac0b WatchSource:0}: Error finding container 313f715d68dc96ca5b5ec04c140e1a142f6fe8bc21264b06f3482f1a3b96ac0b: Status 404 returned error can't find the container with id 313f715d68dc96ca5b5ec04c140e1a142f6fe8bc21264b06f3482f1a3b96ac0b Sep 29 19:16:52 crc kubenswrapper[4792]: I0929 19:16:52.206312 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"36d9080e-6ba5-4a59-ac59-21f8a868df0d","Type":"ContainerStarted","Data":"313f715d68dc96ca5b5ec04c140e1a142f6fe8bc21264b06f3482f1a3b96ac0b"} Sep 29 19:16:53 crc kubenswrapper[4792]: I0929 19:16:53.510293 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.754990 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5dd4fd546c-9hwf9"] Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.756707 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.759117 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.759416 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.759733 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.778599 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5dd4fd546c-9hwf9"] Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.949017 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwfjb\" (UniqueName: \"kubernetes.io/projected/8d8f74d0-be39-457e-ad50-c21d43cc942e-kube-api-access-gwfjb\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.949706 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-combined-ca-bundle\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.949768 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8d8f74d0-be39-457e-ad50-c21d43cc942e-etc-swift\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.949809 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d8f74d0-be39-457e-ad50-c21d43cc942e-log-httpd\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.950046 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-config-data\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.950101 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d8f74d0-be39-457e-ad50-c21d43cc942e-run-httpd\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.950122 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-internal-tls-certs\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " 
pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:54 crc kubenswrapper[4792]: I0929 19:16:54.950302 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-public-tls-certs\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.052085 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-combined-ca-bundle\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.052738 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8d8f74d0-be39-457e-ad50-c21d43cc942e-etc-swift\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.052774 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d8f74d0-be39-457e-ad50-c21d43cc942e-log-httpd\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.052820 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-config-data\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.052863 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d8f74d0-be39-457e-ad50-c21d43cc942e-run-httpd\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.052882 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-internal-tls-certs\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.052954 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-public-tls-certs\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.053024 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwfjb\" (UniqueName: \"kubernetes.io/projected/8d8f74d0-be39-457e-ad50-c21d43cc942e-kube-api-access-gwfjb\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9" 
Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.053563 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d8f74d0-be39-457e-ad50-c21d43cc942e-run-httpd\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9"
Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.053667 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d8f74d0-be39-457e-ad50-c21d43cc942e-log-httpd\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9"
Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.059353 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-public-tls-certs\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9"
Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.065567 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-internal-tls-certs\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9"
Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.074320 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-config-data\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9"
Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.075187 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8f74d0-be39-457e-ad50-c21d43cc942e-combined-ca-bundle\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9"
Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.078484 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwfjb\" (UniqueName: \"kubernetes.io/projected/8d8f74d0-be39-457e-ad50-c21d43cc942e-kube-api-access-gwfjb\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9"
Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.078854 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8d8f74d0-be39-457e-ad50-c21d43cc942e-etc-swift\") pod \"swift-proxy-5dd4fd546c-9hwf9\" (UID: \"8d8f74d0-be39-457e-ad50-c21d43cc942e\") " pod="openstack/swift-proxy-5dd4fd546c-9hwf9"
Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.082402 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5dd4fd546c-9hwf9"
Sep 29 19:16:55 crc kubenswrapper[4792]: I0929 19:16:55.800908 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5dd4fd546c-9hwf9"]
Sep 29 19:16:55 crc kubenswrapper[4792]: W0929 19:16:55.814522 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d8f74d0_be39_457e_ad50_c21d43cc942e.slice/crio-59f96b31d889c524ab5ef2d226aaeed67b7ba7f7fc045fc0b8918bb5f1bdebae WatchSource:0}: Error finding container 59f96b31d889c524ab5ef2d226aaeed67b7ba7f7fc045fc0b8918bb5f1bdebae: Status 404 returned error can't find the container with id 59f96b31d889c524ab5ef2d226aaeed67b7ba7f7fc045fc0b8918bb5f1bdebae
Sep 29 19:16:56 crc kubenswrapper[4792]: I0929 19:16:56.256278 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5dd4fd546c-9hwf9" event={"ID":"8d8f74d0-be39-457e-ad50-c21d43cc942e","Type":"ContainerStarted","Data":"66224458d0684de27410f4a97d0aeb919950d801e01125b5c95250517ccd1dd3"}
Sep 29 19:16:56 crc kubenswrapper[4792]: I0929 19:16:56.256691 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5dd4fd546c-9hwf9" event={"ID":"8d8f74d0-be39-457e-ad50-c21d43cc942e","Type":"ContainerStarted","Data":"59f96b31d889c524ab5ef2d226aaeed67b7ba7f7fc045fc0b8918bb5f1bdebae"}
Sep 29 19:16:57 crc kubenswrapper[4792]: I0929 19:16:57.274595 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:16:57 crc kubenswrapper[4792]: I0929 19:16:57.275277 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="ceilometer-central-agent" containerID="cri-o://c2333651cec5d27d20051349a4ed6406532cbb246ab58bf74f004cf9da156d8a" gracePeriod=30
Sep 29 19:16:57 crc kubenswrapper[4792]: I0929 19:16:57.275681 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="proxy-httpd" containerID="cri-o://ce3cb60d2edb606aed366ce6ca6ef0a742440836731698d163e6693f5a6c1c06" gracePeriod=30
Sep 29 19:16:57 crc kubenswrapper[4792]: I0929 19:16:57.275725 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="sg-core" containerID="cri-o://898d55096687f9fd3bcc15e938082b53cc537880479c2ec5b2c965b0327fae5f" gracePeriod=30
Sep 29 19:16:57 crc kubenswrapper[4792]: I0929 19:16:57.275758 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="ceilometer-notification-agent" containerID="cri-o://b7dccfec0a9e8f86078b8489c984facdbf211ea443e4cff300288d3c30781ba2" gracePeriod=30
probe="readiness" status="" pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:16:57 crc kubenswrapper[4792]: I0929 19:16:57.312479 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5dd4fd546c-9hwf9" podStartSLOduration=3.312459812 podStartE2EDuration="3.312459812s" podCreationTimestamp="2025-09-29 19:16:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:16:57.304814558 +0000 UTC m=+1229.298121964" watchObservedRunningTime="2025-09-29 19:16:57.312459812 +0000 UTC m=+1229.305767198" Sep 29 19:16:58 crc kubenswrapper[4792]: I0929 19:16:58.314333 4792 generic.go:334] "Generic (PLEG): container finished" podID="a38f93d7-b748-4e1f-beff-e89168177090" containerID="ce3cb60d2edb606aed366ce6ca6ef0a742440836731698d163e6693f5a6c1c06" exitCode=0 Sep 29 19:16:58 crc kubenswrapper[4792]: I0929 19:16:58.314363 4792 generic.go:334] "Generic (PLEG): container finished" podID="a38f93d7-b748-4e1f-beff-e89168177090" containerID="898d55096687f9fd3bcc15e938082b53cc537880479c2ec5b2c965b0327fae5f" exitCode=2 Sep 29 19:16:58 crc kubenswrapper[4792]: I0929 19:16:58.314370 4792 generic.go:334] "Generic (PLEG): container finished" podID="a38f93d7-b748-4e1f-beff-e89168177090" containerID="b7dccfec0a9e8f86078b8489c984facdbf211ea443e4cff300288d3c30781ba2" exitCode=0 Sep 29 19:16:58 crc kubenswrapper[4792]: I0929 19:16:58.314378 4792 generic.go:334] "Generic (PLEG): container finished" podID="a38f93d7-b748-4e1f-beff-e89168177090" containerID="c2333651cec5d27d20051349a4ed6406532cbb246ab58bf74f004cf9da156d8a" exitCode=0 Sep 29 19:16:58 crc kubenswrapper[4792]: I0929 19:16:58.315291 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38f93d7-b748-4e1f-beff-e89168177090","Type":"ContainerDied","Data":"ce3cb60d2edb606aed366ce6ca6ef0a742440836731698d163e6693f5a6c1c06"} Sep 29 19:16:58 crc kubenswrapper[4792]: I0929 19:16:58.315316 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38f93d7-b748-4e1f-beff-e89168177090","Type":"ContainerDied","Data":"898d55096687f9fd3bcc15e938082b53cc537880479c2ec5b2c965b0327fae5f"} Sep 29 19:16:58 crc kubenswrapper[4792]: I0929 19:16:58.315326 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38f93d7-b748-4e1f-beff-e89168177090","Type":"ContainerDied","Data":"b7dccfec0a9e8f86078b8489c984facdbf211ea443e4cff300288d3c30781ba2"} Sep 29 19:16:58 crc kubenswrapper[4792]: I0929 19:16:58.315335 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38f93d7-b748-4e1f-beff-e89168177090","Type":"ContainerDied","Data":"c2333651cec5d27d20051349a4ed6406532cbb246ab58bf74f004cf9da156d8a"} Sep 29 19:16:58 crc kubenswrapper[4792]: I0929 19:16:58.774107 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 19:16:59 crc kubenswrapper[4792]: I0929 19:16:59.617387 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:16:59 crc kubenswrapper[4792]: I0929 19:16:59.618478 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ffd2c1db-2c19-492f-8783-f03f235013da" containerName="glance-log" containerID="cri-o://803f4d15c0202056158ec73d27818f3ea16ced3e14acb9c68a9e4775c034a304" gracePeriod=30 Sep 29 
19:16:59 crc kubenswrapper[4792]: I0929 19:16:59.618543 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ffd2c1db-2c19-492f-8783-f03f235013da" containerName="glance-httpd" containerID="cri-o://296697811ab6aba68cfb065cda6c785c1c9a617b06fde3730edfd00ed8afd010" gracePeriod=30 Sep 29 19:17:00 crc kubenswrapper[4792]: I0929 19:17:00.335573 4792 generic.go:334] "Generic (PLEG): container finished" podID="ffd2c1db-2c19-492f-8783-f03f235013da" containerID="803f4d15c0202056158ec73d27818f3ea16ced3e14acb9c68a9e4775c034a304" exitCode=143 Sep 29 19:17:00 crc kubenswrapper[4792]: I0929 19:17:00.335610 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ffd2c1db-2c19-492f-8783-f03f235013da","Type":"ContainerDied","Data":"803f4d15c0202056158ec73d27818f3ea16ced3e14acb9c68a9e4775c034a304"} Sep 29 19:17:00 crc kubenswrapper[4792]: I0929 19:17:00.603037 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:17:00 crc kubenswrapper[4792]: I0929 19:17:00.603541 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" containerName="glance-log" containerID="cri-o://f9c1c5f9bbf627d9bfed230d14685cf3c86bce16c947782b80e9d155d4580e61" gracePeriod=30 Sep 29 19:17:00 crc kubenswrapper[4792]: I0929 19:17:00.603652 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" containerName="glance-httpd" containerID="cri-o://c538a8a06bd4d6b77b452474e62bd5618a1cf99a8a20eb83f58d03a2a24f4cdf" gracePeriod=30 Sep 29 19:17:01 crc kubenswrapper[4792]: I0929 19:17:01.347352 4792 generic.go:334] "Generic (PLEG): container finished" podID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" containerID="f9c1c5f9bbf627d9bfed230d14685cf3c86bce16c947782b80e9d155d4580e61" exitCode=143 Sep 29 19:17:01 crc kubenswrapper[4792]: I0929 19:17:01.347402 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a","Type":"ContainerDied","Data":"f9c1c5f9bbf627d9bfed230d14685cf3c86bce16c947782b80e9d155d4580e61"} Sep 29 19:17:03 crc kubenswrapper[4792]: I0929 19:17:03.369582 4792 generic.go:334] "Generic (PLEG): container finished" podID="ffd2c1db-2c19-492f-8783-f03f235013da" containerID="296697811ab6aba68cfb065cda6c785c1c9a617b06fde3730edfd00ed8afd010" exitCode=0 Sep 29 19:17:03 crc kubenswrapper[4792]: I0929 19:17:03.369839 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ffd2c1db-2c19-492f-8783-f03f235013da","Type":"ContainerDied","Data":"296697811ab6aba68cfb065cda6c785c1c9a617b06fde3730edfd00ed8afd010"} Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.381972 4792 generic.go:334] "Generic (PLEG): container finished" podID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" containerID="c538a8a06bd4d6b77b452474e62bd5618a1cf99a8a20eb83f58d03a2a24f4cdf" exitCode=0 Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.382221 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a","Type":"ContainerDied","Data":"c538a8a06bd4d6b77b452474e62bd5618a1cf99a8a20eb83f58d03a2a24f4cdf"} Sep 29 19:17:04 crc 
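[editor's note] The exit codes in this stretch decode as: 0 for containers that shut down cleanly on SIGTERM (glance-httpd here, most of the ceilometer containers above), 2 for sg-core's own error status, and 143 for the glance-log containers, which is the 128+signal convention for a process terminated by SIGTERM (signal 15) without a handler of its own. For example:

    package main

    import "fmt"

    func main() {
        // Exit status convention: a process killed by signal N reports 128+N.
        const sigterm = 15
        fmt.Println(128 + sigterm) // 143, the code reported for glance-log above
    }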
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.765432 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.837244 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.850449 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-httpd-run\") pod \"ffd2c1db-2c19-492f-8783-f03f235013da\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.850685 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-run-httpd\") pod \"a38f93d7-b748-4e1f-beff-e89168177090\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.850726 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-log-httpd\") pod \"a38f93d7-b748-4e1f-beff-e89168177090\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.850770 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-scripts\") pod \"ffd2c1db-2c19-492f-8783-f03f235013da\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.850794 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnhzc\" (UniqueName: \"kubernetes.io/projected/ffd2c1db-2c19-492f-8783-f03f235013da-kube-api-access-cnhzc\") pod \"ffd2c1db-2c19-492f-8783-f03f235013da\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.850809 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-config-data\") pod \"a38f93d7-b748-4e1f-beff-e89168177090\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.851042 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ffd2c1db-2c19-492f-8783-f03f235013da" (UID: "ffd2c1db-2c19-492f-8783-f03f235013da"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.851170 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-combined-ca-bundle\") pod \"ffd2c1db-2c19-492f-8783-f03f235013da\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.851203 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-logs\") pod \"ffd2c1db-2c19-492f-8783-f03f235013da\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.851229 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-combined-ca-bundle\") pod \"a38f93d7-b748-4e1f-beff-e89168177090\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.851258 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ffd2c1db-2c19-492f-8783-f03f235013da\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.851283 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-sg-core-conf-yaml\") pod \"a38f93d7-b748-4e1f-beff-e89168177090\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.851299 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-scripts\") pod \"a38f93d7-b748-4e1f-beff-e89168177090\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.851314 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-public-tls-certs\") pod \"ffd2c1db-2c19-492f-8783-f03f235013da\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.851519 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-config-data\") pod \"ffd2c1db-2c19-492f-8783-f03f235013da\" (UID: \"ffd2c1db-2c19-492f-8783-f03f235013da\") "
Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.851565 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqklb\" (UniqueName: \"kubernetes.io/projected/a38f93d7-b748-4e1f-beff-e89168177090-kube-api-access-lqklb\") pod \"a38f93d7-b748-4e1f-beff-e89168177090\" (UID: \"a38f93d7-b748-4e1f-beff-e89168177090\") "
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.853800 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a38f93d7-b748-4e1f-beff-e89168177090" (UID: "a38f93d7-b748-4e1f-beff-e89168177090"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.854101 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a38f93d7-b748-4e1f-beff-e89168177090" (UID: "a38f93d7-b748-4e1f-beff-e89168177090"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.866169 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.866540 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38f93d7-b748-4e1f-beff-e89168177090-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.866584 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.866593 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ffd2c1db-2c19-492f-8783-f03f235013da-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.882430 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "ffd2c1db-2c19-492f-8783-f03f235013da" (UID: "ffd2c1db-2c19-492f-8783-f03f235013da"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.882602 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a38f93d7-b748-4e1f-beff-e89168177090-kube-api-access-lqklb" (OuterVolumeSpecName: "kube-api-access-lqklb") pod "a38f93d7-b748-4e1f-beff-e89168177090" (UID: "a38f93d7-b748-4e1f-beff-e89168177090"). InnerVolumeSpecName "kube-api-access-lqklb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.886270 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-scripts" (OuterVolumeSpecName: "scripts") pod "ffd2c1db-2c19-492f-8783-f03f235013da" (UID: "ffd2c1db-2c19-492f-8783-f03f235013da"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.919209 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffd2c1db-2c19-492f-8783-f03f235013da-kube-api-access-cnhzc" (OuterVolumeSpecName: "kube-api-access-cnhzc") pod "ffd2c1db-2c19-492f-8783-f03f235013da" (UID: "ffd2c1db-2c19-492f-8783-f03f235013da"). InnerVolumeSpecName "kube-api-access-cnhzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.928940 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-scripts" (OuterVolumeSpecName: "scripts") pod "a38f93d7-b748-4e1f-beff-e89168177090" (UID: "a38f93d7-b748-4e1f-beff-e89168177090"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.968360 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.968392 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqklb\" (UniqueName: \"kubernetes.io/projected/a38f93d7-b748-4e1f-beff-e89168177090-kube-api-access-lqklb\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.968403 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.968411 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnhzc\" (UniqueName: \"kubernetes.io/projected/ffd2c1db-2c19-492f-8783-f03f235013da-kube-api-access-cnhzc\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.968434 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Sep 29 19:17:04 crc kubenswrapper[4792]: I0929 19:17:04.977916 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.000576 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ffd2c1db-2c19-492f-8783-f03f235013da" (UID: "ffd2c1db-2c19-492f-8783-f03f235013da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.012235 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a38f93d7-b748-4e1f-beff-e89168177090" (UID: "a38f93d7-b748-4e1f-beff-e89168177090"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.027969 4792 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.044503 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ffd2c1db-2c19-492f-8783-f03f235013da" (UID: "ffd2c1db-2c19-492f-8783-f03f235013da"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.069574 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-config-data\") pod \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.069614 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-internal-tls-certs\") pod \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.069637 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-combined-ca-bundle\") pod \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.069661 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-scripts\") pod \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.069685 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.069705 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndvh2\" (UniqueName: \"kubernetes.io/projected/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-kube-api-access-ndvh2\") pod \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.069990 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-logs\") pod \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.070032 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-httpd-run\") pod \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\" (UID: \"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a\") " Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.070482 
4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.070504 4792 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.070514 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.070523 4792 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.072483 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" (UID: "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.073538 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-logs" (OuterVolumeSpecName: "logs") pod "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" (UID: "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.078820 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" (UID: "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.079688 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-kube-api-access-ndvh2" (OuterVolumeSpecName: "kube-api-access-ndvh2") pod "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" (UID: "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a"). InnerVolumeSpecName "kube-api-access-ndvh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.081931 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-scripts" (OuterVolumeSpecName: "scripts") pod "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" (UID: "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.082572 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-config-data" (OuterVolumeSpecName: "config-data") pod "a38f93d7-b748-4e1f-beff-e89168177090" (UID: "a38f93d7-b748-4e1f-beff-e89168177090"). InnerVolumeSpecName "config-data". 
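[editor's note] Teardown above runs in the reverse order of setup, per volume: operation_generator.go:803 unmounts from the pod's directory (UnmountVolume.TearDown), reconciler_common.go:293 then records "Volume detached", and only the local PVs (local-storage04-crc, local-storage12-crc) take the extra node-level step, UnmountDevice started/succeeded, once no pod still references the device. A sketch of that ordering in Go (hypothetical helpers, not the kubelet implementation):

    package main

    // Hypothetical helpers standing in for kubelet's operation executor; the
    // real implementations live in operation_generator.go.
    func tearDown(volume string) error      { return nil } // "UnmountVolume.TearDown"
    func unmountDevice(device string) error { return nil } // "UnmountDevice"

    // cleanupVolume mirrors the ordering visible above: pod-level teardown
    // first, then the node-level unmount for device-backed volumes, and
    // finally the "Volume detached" bookkeeping.
    func cleanupVolume(volume, device string, lastReference bool) error {
        if err := tearDown(volume); err != nil {
            return err
        }
        if lastReference && device != "" {
            if err := unmountDevice(device); err != nil {
                return err
            }
        }
        // reconciler_common.go:293 "Volume detached for volume ..."
        return nil
    }

    func main() {
        _ = cleanupVolume("glance", "local-storage12-crc", true)
    }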
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.091754 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.099762 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5dd4fd546c-9hwf9" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.102722 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-config-data" (OuterVolumeSpecName: "config-data") pod "ffd2c1db-2c19-492f-8783-f03f235013da" (UID: "ffd2c1db-2c19-492f-8783-f03f235013da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.110098 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a38f93d7-b748-4e1f-beff-e89168177090" (UID: "a38f93d7-b748-4e1f-beff-e89168177090"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.168726 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" (UID: "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.172558 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.172587 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd2c1db-2c19-492f-8783-f03f235013da-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.172596 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.172604 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.172623 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.172633 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndvh2\" (UniqueName: \"kubernetes.io/projected/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-kube-api-access-ndvh2\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.172672 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-config-data\") on node 
\"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.172685 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38f93d7-b748-4e1f-beff-e89168177090-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.172696 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.177089 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-config-data" (OuterVolumeSpecName: "config-data") pod "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" (UID: "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.198963 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" (UID: "cf2861f4-a8d6-4c0e-bb03-bd2c51def90a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.237824 4792 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.275810 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.275843 4792 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.275873 4792 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.391614 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"36d9080e-6ba5-4a59-ac59-21f8a868df0d","Type":"ContainerStarted","Data":"685a3438a9433f4b24e7cbfe1abdc273615331237fa7ab1259d243fa0ecfffca"} Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.394050 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ffd2c1db-2c19-492f-8783-f03f235013da","Type":"ContainerDied","Data":"96aa30f00143b62ca1e0d28d6cc8c8206861898a50f6f3348a28796f3644dde2"} Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.394086 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.394100 4792 scope.go:117] "RemoveContainer" containerID="296697811ab6aba68cfb065cda6c785c1c9a617b06fde3730edfd00ed8afd010" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.397007 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf2861f4-a8d6-4c0e-bb03-bd2c51def90a","Type":"ContainerDied","Data":"7deee0a366d3a3c058795f4201190c88ab6049a482b90a77ca98f34fce04500e"} Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.397123 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.402641 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38f93d7-b748-4e1f-beff-e89168177090","Type":"ContainerDied","Data":"9c1a6d715a4d43a0a6618490d411f20abee29ba7d196166dbc4c8e4998a7039a"} Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.402955 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.459316 4792 scope.go:117] "RemoveContainer" containerID="803f4d15c0202056158ec73d27818f3ea16ced3e14acb9c68a9e4775c034a304" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.466110 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.8914093960000002 podStartE2EDuration="15.466097993s" podCreationTimestamp="2025-09-29 19:16:50 +0000 UTC" firstStartedPulling="2025-09-29 19:16:51.885493419 +0000 UTC m=+1223.878800815" lastFinishedPulling="2025-09-29 19:17:04.460182016 +0000 UTC m=+1236.453489412" observedRunningTime="2025-09-29 19:17:05.427041798 +0000 UTC m=+1237.420349194" watchObservedRunningTime="2025-09-29 19:17:05.466097993 +0000 UTC m=+1237.459405389" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.476078 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.483942 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.485399 4792 scope.go:117] "RemoveContainer" containerID="c538a8a06bd4d6b77b452474e62bd5618a1cf99a8a20eb83f58d03a2a24f4cdf" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.500264 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.511751 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.517726 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: E0929 19:17:05.518267 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="ceilometer-notification-agent" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.518381 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="ceilometer-notification-agent" Sep 29 19:17:05 crc kubenswrapper[4792]: E0929 19:17:05.518459 4792 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="ceilometer-central-agent" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.518548 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="ceilometer-central-agent" Sep 29 19:17:05 crc kubenswrapper[4792]: E0929 19:17:05.518617 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffd2c1db-2c19-492f-8783-f03f235013da" containerName="glance-log" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.518667 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffd2c1db-2c19-492f-8783-f03f235013da" containerName="glance-log" Sep 29 19:17:05 crc kubenswrapper[4792]: E0929 19:17:05.518730 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="sg-core" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.518781 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="sg-core" Sep 29 19:17:05 crc kubenswrapper[4792]: E0929 19:17:05.518835 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffd2c1db-2c19-492f-8783-f03f235013da" containerName="glance-httpd" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.518902 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffd2c1db-2c19-492f-8783-f03f235013da" containerName="glance-httpd" Sep 29 19:17:05 crc kubenswrapper[4792]: E0929 19:17:05.518972 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" containerName="glance-httpd" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519022 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" containerName="glance-httpd" Sep 29 19:17:05 crc kubenswrapper[4792]: E0929 19:17:05.519081 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" containerName="glance-log" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519133 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" containerName="glance-log" Sep 29 19:17:05 crc kubenswrapper[4792]: E0929 19:17:05.519193 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="proxy-httpd" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519242 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="proxy-httpd" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519465 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" containerName="glance-log" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519534 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="proxy-httpd" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519592 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffd2c1db-2c19-492f-8783-f03f235013da" containerName="glance-log" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519650 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" containerName="glance-httpd" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519701 4792 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="sg-core" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519758 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="ceilometer-central-agent" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519809 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffd2c1db-2c19-492f-8783-f03f235013da" containerName="glance-httpd" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.519886 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a38f93d7-b748-4e1f-beff-e89168177090" containerName="ceilometer-notification-agent" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.521488 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.526188 4792 scope.go:117] "RemoveContainer" containerID="f9c1c5f9bbf627d9bfed230d14685cf3c86bce16c947782b80e9d155d4580e61" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.532591 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.534176 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.534358 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.547908 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.554042 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.555581 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.558144 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hgxkm" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.558350 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.558467 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.558957 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.560647 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.565347 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.566887 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.573633 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.573895 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.580623 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7j2m\" (UniqueName: \"kubernetes.io/projected/e77007af-4255-4480-94c7-acb59becda59-kube-api-access-g7j2m\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.580661 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.580683 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-log-httpd\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.580705 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-config-data\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.580719 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.580737 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-scripts\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.580774 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-run-httpd\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.609154 4792 scope.go:117] "RemoveContainer" containerID="ce3cb60d2edb606aed366ce6ca6ef0a742440836731698d163e6693f5a6c1c06" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.611208 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.628861 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 
19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.651799 4792 scope.go:117] "RemoveContainer" containerID="898d55096687f9fd3bcc15e938082b53cc537880479c2ec5b2c965b0327fae5f" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.672356 4792 scope.go:117] "RemoveContainer" containerID="b7dccfec0a9e8f86078b8489c984facdbf211ea443e4cff300288d3c30781ba2" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682084 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-logs\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682153 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682181 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682625 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7j2m\" (UniqueName: \"kubernetes.io/projected/e77007af-4255-4480-94c7-acb59becda59-kube-api-access-g7j2m\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682651 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682675 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-log-httpd\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682698 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682724 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-config-data\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682740 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682758 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-scripts\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682775 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-scripts\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682810 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682829 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bfg2\" (UniqueName: \"kubernetes.io/projected/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-kube-api-access-8bfg2\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682864 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-run-httpd\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.682918 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-config-data\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.684355 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-log-httpd\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.685285 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-run-httpd\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.686747 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc 
kubenswrapper[4792]: I0929 19:17:05.687357 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-scripts\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.703667 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.703839 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7j2m\" (UniqueName: \"kubernetes.io/projected/e77007af-4255-4480-94c7-acb59becda59-kube-api-access-g7j2m\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.708671 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-config-data\") pod \"ceilometer-0\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") " pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.711138 4792 scope.go:117] "RemoveContainer" containerID="c2333651cec5d27d20051349a4ed6406532cbb246ab58bf74f004cf9da156d8a" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784388 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784694 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784719 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-scripts\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784752 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784769 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bfg2\" (UniqueName: \"kubernetes.io/projected/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-kube-api-access-8bfg2\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc 
kubenswrapper[4792]: I0929 19:17:05.784785 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784807 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784836 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss5tb\" (UniqueName: \"kubernetes.io/projected/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-kube-api-access-ss5tb\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784877 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784897 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784921 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784949 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-config-data\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784976 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-logs\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.784996 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 
19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.785025 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.785046 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.785487 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.786179 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.786350 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-logs\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.789050 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.789437 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-config-data\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.795500 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-scripts\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.801009 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.815020 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8bfg2\" (UniqueName: \"kubernetes.io/projected/a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2-kube-api-access-8bfg2\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.844678 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.860769 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2\") " pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.879416 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.886697 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.886809 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.887069 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.887104 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.887132 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss5tb\" (UniqueName: \"kubernetes.io/projected/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-kube-api-access-ss5tb\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.887153 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.887176 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-scripts\") pod 
\"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.887199 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.887741 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.888370 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.908778 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.909025 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.909996 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.936589 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:05 crc kubenswrapper[4792]: I0929 19:17:05.938709 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss5tb\" (UniqueName: \"kubernetes.io/projected/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-kube-api-access-ss5tb\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:06 crc kubenswrapper[4792]: I0929 19:17:06.012135 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe46ba4e-dc2a-4960-97d1-fd34116ee7d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " 
pod="openstack/glance-default-internal-api-0" Sep 29 19:17:06 crc kubenswrapper[4792]: I0929 19:17:06.014364 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:17:06 crc kubenswrapper[4792]: I0929 19:17:06.200291 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:17:06 crc kubenswrapper[4792]: I0929 19:17:06.386820 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:17:06 crc kubenswrapper[4792]: I0929 19:17:06.461823 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e77007af-4255-4480-94c7-acb59becda59","Type":"ContainerStarted","Data":"ec46fbba43d80ca8d05e53d1c381ad1baa6fcc24bd611a7657f486be6f46d9ed"} Sep 29 19:17:06 crc kubenswrapper[4792]: I0929 19:17:06.589158 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:17:06 crc kubenswrapper[4792]: I0929 19:17:06.812113 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:17:06 crc kubenswrapper[4792]: W0929 19:17:06.830802 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe46ba4e_dc2a_4960_97d1_fd34116ee7d6.slice/crio-274ba09bf318570a41eabccab2feba74282a0ad4814e299825ee92017f37c5fb WatchSource:0}: Error finding container 274ba09bf318570a41eabccab2feba74282a0ad4814e299825ee92017f37c5fb: Status 404 returned error can't find the container with id 274ba09bf318570a41eabccab2feba74282a0ad4814e299825ee92017f37c5fb Sep 29 19:17:07 crc kubenswrapper[4792]: I0929 19:17:07.038229 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a38f93d7-b748-4e1f-beff-e89168177090" path="/var/lib/kubelet/pods/a38f93d7-b748-4e1f-beff-e89168177090/volumes" Sep 29 19:17:07 crc kubenswrapper[4792]: I0929 19:17:07.039201 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf2861f4-a8d6-4c0e-bb03-bd2c51def90a" path="/var/lib/kubelet/pods/cf2861f4-a8d6-4c0e-bb03-bd2c51def90a/volumes" Sep 29 19:17:07 crc kubenswrapper[4792]: I0929 19:17:07.041099 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffd2c1db-2c19-492f-8783-f03f235013da" path="/var/lib/kubelet/pods/ffd2c1db-2c19-492f-8783-f03f235013da/volumes" Sep 29 19:17:07 crc kubenswrapper[4792]: I0929 19:17:07.478641 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e77007af-4255-4480-94c7-acb59becda59","Type":"ContainerStarted","Data":"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"} Sep 29 19:17:07 crc kubenswrapper[4792]: I0929 19:17:07.483976 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2","Type":"ContainerStarted","Data":"7e628e72b2ca820142402eefbb1eb733284455d1027c3b80ec78bc7c20c52e60"} Sep 29 19:17:07 crc kubenswrapper[4792]: I0929 19:17:07.484027 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2","Type":"ContainerStarted","Data":"1a7402ef1f36f1bbeb80a49fb45ab58a681b87078741152db2f6f17bdf68ef62"} Sep 29 19:17:07 crc kubenswrapper[4792]: I0929 19:17:07.485123 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6","Type":"ContainerStarted","Data":"274ba09bf318570a41eabccab2feba74282a0ad4814e299825ee92017f37c5fb"} Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.510196 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6","Type":"ContainerStarted","Data":"eef0b619ce72cb2d9b7b5fb1fce06f2e75531c69e021af5d49d6ca8530bbb64a"} Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.514345 4792 generic.go:334] "Generic (PLEG): container finished" podID="30fffd32-d307-47ec-b239-aeb8dd47ed41" containerID="a84173731e815c6cf253d60aba27125d90c40582e9c85ae3097bb2976d5772c9" exitCode=137 Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.514546 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"30fffd32-d307-47ec-b239-aeb8dd47ed41","Type":"ContainerDied","Data":"a84173731e815c6cf253d60aba27125d90c40582e9c85ae3097bb2976d5772c9"} Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.522879 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e77007af-4255-4480-94c7-acb59becda59","Type":"ContainerStarted","Data":"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"} Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.526168 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2","Type":"ContainerStarted","Data":"e6164caf5b5916167925e7cbbf28c3f10158e3c79044bdf8cc10f4b6e83173ff"} Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.559097 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.559071799 podStartE2EDuration="3.559071799s" podCreationTimestamp="2025-09-29 19:17:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:17:08.552304495 +0000 UTC m=+1240.545611891" watchObservedRunningTime="2025-09-29 19:17:08.559071799 +0000 UTC m=+1240.552379195" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.744088 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.874523 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dks65\" (UniqueName: \"kubernetes.io/projected/30fffd32-d307-47ec-b239-aeb8dd47ed41-kube-api-access-dks65\") pod \"30fffd32-d307-47ec-b239-aeb8dd47ed41\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.874568 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30fffd32-d307-47ec-b239-aeb8dd47ed41-logs\") pod \"30fffd32-d307-47ec-b239-aeb8dd47ed41\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.874618 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data\") pod \"30fffd32-d307-47ec-b239-aeb8dd47ed41\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.874683 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-combined-ca-bundle\") pod \"30fffd32-d307-47ec-b239-aeb8dd47ed41\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.874795 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30fffd32-d307-47ec-b239-aeb8dd47ed41-etc-machine-id\") pod \"30fffd32-d307-47ec-b239-aeb8dd47ed41\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.874811 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data-custom\") pod \"30fffd32-d307-47ec-b239-aeb8dd47ed41\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.874837 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-scripts\") pod \"30fffd32-d307-47ec-b239-aeb8dd47ed41\" (UID: \"30fffd32-d307-47ec-b239-aeb8dd47ed41\") " Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.875409 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/30fffd32-d307-47ec-b239-aeb8dd47ed41-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "30fffd32-d307-47ec-b239-aeb8dd47ed41" (UID: "30fffd32-d307-47ec-b239-aeb8dd47ed41"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.876503 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30fffd32-d307-47ec-b239-aeb8dd47ed41-logs" (OuterVolumeSpecName: "logs") pod "30fffd32-d307-47ec-b239-aeb8dd47ed41" (UID: "30fffd32-d307-47ec-b239-aeb8dd47ed41"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.882667 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-scripts" (OuterVolumeSpecName: "scripts") pod "30fffd32-d307-47ec-b239-aeb8dd47ed41" (UID: "30fffd32-d307-47ec-b239-aeb8dd47ed41"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.884940 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30fffd32-d307-47ec-b239-aeb8dd47ed41-kube-api-access-dks65" (OuterVolumeSpecName: "kube-api-access-dks65") pod "30fffd32-d307-47ec-b239-aeb8dd47ed41" (UID: "30fffd32-d307-47ec-b239-aeb8dd47ed41"). InnerVolumeSpecName "kube-api-access-dks65". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.886591 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "30fffd32-d307-47ec-b239-aeb8dd47ed41" (UID: "30fffd32-d307-47ec-b239-aeb8dd47ed41"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.963620 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data" (OuterVolumeSpecName: "config-data") pod "30fffd32-d307-47ec-b239-aeb8dd47ed41" (UID: "30fffd32-d307-47ec-b239-aeb8dd47ed41"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.992764 4792 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30fffd32-d307-47ec-b239-aeb8dd47ed41-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.992802 4792 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.992816 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.992827 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dks65\" (UniqueName: \"kubernetes.io/projected/30fffd32-d307-47ec-b239-aeb8dd47ed41-kube-api-access-dks65\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.992838 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30fffd32-d307-47ec-b239-aeb8dd47ed41-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:08 crc kubenswrapper[4792]: I0929 19:17:08.992860 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.005402 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30fffd32-d307-47ec-b239-aeb8dd47ed41" (UID: "30fffd32-d307-47ec-b239-aeb8dd47ed41"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.081452 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.094824 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30fffd32-d307-47ec-b239-aeb8dd47ed41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.537576 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e77007af-4255-4480-94c7-acb59becda59","Type":"ContainerStarted","Data":"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"} Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.539620 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fe46ba4e-dc2a-4960-97d1-fd34116ee7d6","Type":"ContainerStarted","Data":"cb6e35ca9dcfe3a49af45b7a18e8a7942d4a86aa72a083902c9af24eb8ecdfef"} Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.541801 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"30fffd32-d307-47ec-b239-aeb8dd47ed41","Type":"ContainerDied","Data":"73f847753de886d2aab1d5de4349534c5f118dd3ba66a169ddef897e851624a2"} Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.541885 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.541974 4792 scope.go:117] "RemoveContainer" containerID="a84173731e815c6cf253d60aba27125d90c40582e9c85ae3097bb2976d5772c9" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.565832 4792 scope.go:117] "RemoveContainer" containerID="fba4bc7f1bf62433ff5b0ded0f141d63cbde46b9dafb84191bda76ec8c7a68e2" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.575157 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.5751363860000005 podStartE2EDuration="4.575136386s" podCreationTimestamp="2025-09-29 19:17:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:17:09.571764929 +0000 UTC m=+1241.565072325" watchObservedRunningTime="2025-09-29 19:17:09.575136386 +0000 UTC m=+1241.568443782" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.601985 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.616188 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.634428 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:17:09 crc kubenswrapper[4792]: E0929 19:17:09.635029 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30fffd32-d307-47ec-b239-aeb8dd47ed41" containerName="cinder-api" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.635050 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="30fffd32-d307-47ec-b239-aeb8dd47ed41" 
containerName="cinder-api" Sep 29 19:17:09 crc kubenswrapper[4792]: E0929 19:17:09.635086 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30fffd32-d307-47ec-b239-aeb8dd47ed41" containerName="cinder-api-log" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.635111 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="30fffd32-d307-47ec-b239-aeb8dd47ed41" containerName="cinder-api-log" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.635435 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="30fffd32-d307-47ec-b239-aeb8dd47ed41" containerName="cinder-api" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.635452 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="30fffd32-d307-47ec-b239-aeb8dd47ed41" containerName="cinder-api-log" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.636444 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.639307 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.639992 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.640056 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.659519 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.807717 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-config-data-custom\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.808009 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.808060 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-config-data\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.808081 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0" Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.808108 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0" Sep 29 19:17:09 crc 
kubenswrapper[4792]: I0929 19:17:09.808141 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-scripts\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.808156 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0c9927d9-0800-4bfa-bee9-af02caf9596c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.808183 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lz2g\" (UniqueName: \"kubernetes.io/projected/0c9927d9-0800-4bfa-bee9-af02caf9596c-kube-api-access-8lz2g\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.808205 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c9927d9-0800-4bfa-bee9-af02caf9596c-logs\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.910270 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-scripts\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.910312 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0c9927d9-0800-4bfa-bee9-af02caf9596c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.910476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lz2g\" (UniqueName: \"kubernetes.io/projected/0c9927d9-0800-4bfa-bee9-af02caf9596c-kube-api-access-8lz2g\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.910502 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0c9927d9-0800-4bfa-bee9-af02caf9596c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.910508 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c9927d9-0800-4bfa-bee9-af02caf9596c-logs\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.910681 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-config-data-custom\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.910748 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.910865 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-config-data\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.910910 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.910954 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.912688 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c9927d9-0800-4bfa-bee9-af02caf9596c-logs\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.916421 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.917703 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.918035 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-scripts\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.919477 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-config-data-custom\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.920041 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-config-data\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.929894 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lz2g\" (UniqueName: \"kubernetes.io/projected/0c9927d9-0800-4bfa-bee9-af02caf9596c-kube-api-access-8lz2g\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.930268 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c9927d9-0800-4bfa-bee9-af02caf9596c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0c9927d9-0800-4bfa-bee9-af02caf9596c\") " pod="openstack/cinder-api-0"
Sep 29 19:17:09 crc kubenswrapper[4792]: I0929 19:17:09.963787 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Sep 29 19:17:10 crc kubenswrapper[4792]: I0929 19:17:10.450585 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Sep 29 19:17:10 crc kubenswrapper[4792]: I0929 19:17:10.563212 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0c9927d9-0800-4bfa-bee9-af02caf9596c","Type":"ContainerStarted","Data":"55830387248a03dd657f5151ae59cbc4b7d440e02b8f531194896d41346d7e4c"}
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.030868 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30fffd32-d307-47ec-b239-aeb8dd47ed41" path="/var/lib/kubelet/pods/30fffd32-d307-47ec-b239-aeb8dd47ed41/volumes"
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.594505 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0c9927d9-0800-4bfa-bee9-af02caf9596c","Type":"ContainerStarted","Data":"b1a92a86b174a1c811351e4ddcf726e0d5771a12f2f5fce36d3dd5cf3a62ffd6"}
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.599995 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e77007af-4255-4480-94c7-acb59becda59","Type":"ContainerStarted","Data":"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"}
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.600176 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="ceilometer-central-agent" containerID="cri-o://a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d" gracePeriod=30
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.600276 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.600600 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="proxy-httpd" containerID="cri-o://996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977" gracePeriod=30
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.600650 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="sg-core" containerID="cri-o://6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d" gracePeriod=30
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.600686 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="ceilometer-notification-agent" containerID="cri-o://8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0" gracePeriod=30
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.664436 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.311462014 podStartE2EDuration="6.664411056s" podCreationTimestamp="2025-09-29 19:17:05 +0000 UTC" firstStartedPulling="2025-09-29 19:17:06.427361346 +0000 UTC m=+1238.420668742" lastFinishedPulling="2025-09-29 19:17:10.780310388 +0000 UTC m=+1242.773617784" observedRunningTime="2025-09-29 19:17:11.624322116 +0000 UTC m=+1243.617629512" watchObservedRunningTime="2025-09-29 19:17:11.664411056 +0000 UTC m=+1243.657718452"
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.960208 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:17:11 crc kubenswrapper[4792]: I0929 19:17:11.960256 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.602557 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609235 4792 generic.go:334] "Generic (PLEG): container finished" podID="e77007af-4255-4480-94c7-acb59becda59" containerID="996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977" exitCode=0
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609263 4792 generic.go:334] "Generic (PLEG): container finished" podID="e77007af-4255-4480-94c7-acb59becda59" containerID="6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d" exitCode=2
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609270 4792 generic.go:334] "Generic (PLEG): container finished" podID="e77007af-4255-4480-94c7-acb59becda59" containerID="8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0" exitCode=0
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609277 4792 generic.go:334] "Generic (PLEG): container finished" podID="e77007af-4255-4480-94c7-acb59becda59" containerID="a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d" exitCode=0
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609312 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e77007af-4255-4480-94c7-acb59becda59","Type":"ContainerDied","Data":"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"}
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609335 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e77007af-4255-4480-94c7-acb59becda59","Type":"ContainerDied","Data":"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"}
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609344 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e77007af-4255-4480-94c7-acb59becda59","Type":"ContainerDied","Data":"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"}
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609354 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e77007af-4255-4480-94c7-acb59becda59","Type":"ContainerDied","Data":"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"}
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609363 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e77007af-4255-4480-94c7-acb59becda59","Type":"ContainerDied","Data":"ec46fbba43d80ca8d05e53d1c381ad1baa6fcc24bd611a7657f486be6f46d9ed"}
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609377 4792 scope.go:117] "RemoveContainer" containerID="996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.609518 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.612619 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0c9927d9-0800-4bfa-bee9-af02caf9596c","Type":"ContainerStarted","Data":"8af17188a5f5e34f91ea90a56abfa9d2a41a4d072b552b3b15d086dcfdd13f0b"}
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.612783 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.635826 4792 scope.go:117] "RemoveContainer" containerID="6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.654391 4792 scope.go:117] "RemoveContainer" containerID="8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.673881 4792 scope.go:117] "RemoveContainer" containerID="a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.713686 4792 scope.go:117] "RemoveContainer" containerID="996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"
Sep 29 19:17:12 crc kubenswrapper[4792]: E0929 19:17:12.714270 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977\": container with ID starting with 996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977 not found: ID does not exist" containerID="996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.714316 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"} err="failed to get container status \"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977\": rpc error: code = NotFound desc = could not find container \"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977\": container with ID starting with 996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977 not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.714338 4792 scope.go:117] "RemoveContainer" containerID="6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"
Sep 29 19:17:12 crc kubenswrapper[4792]: E0929 19:17:12.719030 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d\": container with ID starting with 6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d not found: ID does not exist" containerID="6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.719063 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"} err="failed to get container status \"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d\": rpc error: code = NotFound desc = could not find container \"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d\": container with ID starting with 6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.719081 4792 scope.go:117] "RemoveContainer" containerID="8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"
Sep 29 19:17:12 crc kubenswrapper[4792]: E0929 19:17:12.720558 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0\": container with ID starting with 8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0 not found: ID does not exist" containerID="8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.720584 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"} err="failed to get container status \"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0\": rpc error: code = NotFound desc = could not find container \"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0\": container with ID starting with 8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0 not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.720601 4792 scope.go:117] "RemoveContainer" containerID="a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"
Sep 29 19:17:12 crc kubenswrapper[4792]: E0929 19:17:12.720799 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d\": container with ID starting with a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d not found: ID does not exist" containerID="a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.720819 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"} err="failed to get container status \"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d\": rpc error: code = NotFound desc = could not find container \"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d\": container with ID starting with a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.720833 4792 scope.go:117] "RemoveContainer" containerID="996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.721119 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"} err="failed to get container status \"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977\": rpc error: code = NotFound desc = could not find container \"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977\": container with ID starting with 996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977 not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.721165 4792 scope.go:117] "RemoveContainer" containerID="6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.722866 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"} err="failed to get container status \"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d\": rpc error: code = NotFound desc = could not find container \"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d\": container with ID starting with 6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.722893 4792 scope.go:117] "RemoveContainer" containerID="8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.723402 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"} err="failed to get container status \"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0\": rpc error: code = NotFound desc = could not find container \"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0\": container with ID starting with 8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0 not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.723422 4792 scope.go:117] "RemoveContainer" containerID="a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.727122 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"} err="failed to get container status \"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d\": rpc error: code = NotFound desc = could not find container \"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d\": container with ID starting with a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.727148 4792 scope.go:117] "RemoveContainer" containerID="996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.727450 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"} err="failed to get container status \"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977\": rpc error: code = NotFound desc = could not find container \"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977\": container with ID starting with 996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977 not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.727488 4792 scope.go:117] "RemoveContainer" containerID="6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.728725 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"} err="failed to get container status \"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d\": rpc error: code = NotFound desc = could not find container \"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d\": container with ID starting with 6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.728747 4792 scope.go:117] "RemoveContainer" containerID="8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.728991 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"} err="failed to get container status \"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0\": rpc error: code = NotFound desc = could not find container \"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0\": container with ID starting with 8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0 not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.729014 4792 scope.go:117] "RemoveContainer" containerID="a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.729947 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"} err="failed to get container status \"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d\": rpc error: code = NotFound desc = could not find container \"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d\": container with ID starting with a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.729974 4792 scope.go:117] "RemoveContainer" containerID="996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.730233 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977"} err="failed to get container status \"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977\": rpc error: code = NotFound desc = could not find container \"996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977\": container with ID starting with 996fb6aae79f601f4893d0db53352ced763636f282dc780968cda5952ea7a977 not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.730259 4792 scope.go:117] "RemoveContainer" containerID="6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.730456 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d"} err="failed to get container status \"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d\": rpc error: code = NotFound desc = could not find container \"6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d\": container with ID starting with 6bb8b6bb8c8aeb9dc622fb5d65252eafbb9f3302265a5bc8d7c72f6c712df00d not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.730479 4792 scope.go:117] "RemoveContainer" containerID="8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.732007 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0"} err="failed to get container status \"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0\": rpc error: code = NotFound desc = could not find container \"8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0\": container with ID starting with 8216b4df3729c31b616b74070b4d0f867f577f818e1481d8960b3d56615d7ad0 not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.732036 4792 scope.go:117] "RemoveContainer" containerID="a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.732374 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d"} err="failed to get container status \"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d\": rpc error: code = NotFound desc = could not find container \"a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d\": container with ID starting with a6cba20d382b798595dfd1b557b6e7980a4e355e111516025afd83b40a93612d not found: ID does not exist"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.777635 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-combined-ca-bundle\") pod \"e77007af-4255-4480-94c7-acb59becda59\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") "
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.777685 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-run-httpd\") pod \"e77007af-4255-4480-94c7-acb59becda59\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") "
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.777710 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-log-httpd\") pod \"e77007af-4255-4480-94c7-acb59becda59\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") "
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.777740 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-scripts\") pod \"e77007af-4255-4480-94c7-acb59becda59\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") "
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.777947 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-sg-core-conf-yaml\") pod \"e77007af-4255-4480-94c7-acb59becda59\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") "
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.777965 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-config-data\") pod \"e77007af-4255-4480-94c7-acb59becda59\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") "
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.778018 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e77007af-4255-4480-94c7-acb59becda59" (UID: "e77007af-4255-4480-94c7-acb59becda59"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.778159 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e77007af-4255-4480-94c7-acb59becda59" (UID: "e77007af-4255-4480-94c7-acb59becda59"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.778509 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7j2m\" (UniqueName: \"kubernetes.io/projected/e77007af-4255-4480-94c7-acb59becda59-kube-api-access-g7j2m\") pod \"e77007af-4255-4480-94c7-acb59becda59\" (UID: \"e77007af-4255-4480-94c7-acb59becda59\") "
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.778994 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.779006 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e77007af-4255-4480-94c7-acb59becda59-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.785006 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-scripts" (OuterVolumeSpecName: "scripts") pod "e77007af-4255-4480-94c7-acb59becda59" (UID: "e77007af-4255-4480-94c7-acb59becda59"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.790242 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e77007af-4255-4480-94c7-acb59becda59-kube-api-access-g7j2m" (OuterVolumeSpecName: "kube-api-access-g7j2m") pod "e77007af-4255-4480-94c7-acb59becda59" (UID: "e77007af-4255-4480-94c7-acb59becda59"). InnerVolumeSpecName "kube-api-access-g7j2m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.808478 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e77007af-4255-4480-94c7-acb59becda59" (UID: "e77007af-4255-4480-94c7-acb59becda59"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.880271 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7j2m\" (UniqueName: \"kubernetes.io/projected/e77007af-4255-4480-94c7-acb59becda59-kube-api-access-g7j2m\") on node \"crc\" DevicePath \"\""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.880309 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.880322 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.883675 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e77007af-4255-4480-94c7-acb59becda59" (UID: "e77007af-4255-4480-94c7-acb59becda59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.907453 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-config-data" (OuterVolumeSpecName: "config-data") pod "e77007af-4255-4480-94c7-acb59becda59" (UID: "e77007af-4255-4480-94c7-acb59becda59"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.955524 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.955500048 podStartE2EDuration="3.955500048s" podCreationTimestamp="2025-09-29 19:17:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:17:12.658980186 +0000 UTC m=+1244.652287602" watchObservedRunningTime="2025-09-29 19:17:12.955500048 +0000 UTC m=+1244.948807454"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.959718 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.967367 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.982658 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.982704 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e77007af-4255-4480-94c7-acb59becda59-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.987013 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:12 crc kubenswrapper[4792]: E0929 19:17:12.987607 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="sg-core"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.987700 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="sg-core"
Sep 29 19:17:12 crc kubenswrapper[4792]: E0929 19:17:12.987815 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="ceilometer-central-agent"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.987932 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="ceilometer-central-agent"
Sep 29 19:17:12 crc kubenswrapper[4792]: E0929 19:17:12.988033 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="ceilometer-notification-agent"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.988105 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="ceilometer-notification-agent"
Sep 29 19:17:12 crc kubenswrapper[4792]: E0929 19:17:12.988181 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="proxy-httpd"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.988412 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="proxy-httpd"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.988717 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="ceilometer-central-agent"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.988818 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="proxy-httpd"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.988932 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="ceilometer-notification-agent"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.989026 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e77007af-4255-4480-94c7-acb59becda59" containerName="sg-core"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.991139 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.995992 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 19:17:12 crc kubenswrapper[4792]: I0929 19:17:12.999473 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.007406 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.038508 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e77007af-4255-4480-94c7-acb59becda59" path="/var/lib/kubelet/pods/e77007af-4255-4480-94c7-acb59becda59/volumes"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.090255 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-run-httpd\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.090410 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hds6s\" (UniqueName: \"kubernetes.io/projected/3fb6db85-a6bd-485b-a235-ac35bd267ef5-kube-api-access-hds6s\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.090496 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.091547 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-config-data\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.091864 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-scripts\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.091937 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-log-httpd\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.092036 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.193579 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-scripts\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.193642 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-log-httpd\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.193689 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.193709 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-run-httpd\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.193843 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hds6s\" (UniqueName: \"kubernetes.io/projected/3fb6db85-a6bd-485b-a235-ac35bd267ef5-kube-api-access-hds6s\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.193913 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.193939 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-config-data\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.195971 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-log-httpd\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.196192 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-run-httpd\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.200036 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-scripts\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.202602 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.203588 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.205241 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-config-data\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.213881 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hds6s\" (UniqueName: \"kubernetes.io/projected/3fb6db85-a6bd-485b-a235-ac35bd267ef5-kube-api-access-hds6s\") pod \"ceilometer-0\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.309743 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.624736 4792 generic.go:334] "Generic (PLEG): container finished" podID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerID="30df7ac6c56065d05590b40fa8b60c11ef56e2f8dbc338a1dc4730d3f00fb6e0" exitCode=137
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.624785 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd9c6b56-wq84c" event={"ID":"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc","Type":"ContainerDied","Data":"30df7ac6c56065d05590b40fa8b60c11ef56e2f8dbc338a1dc4730d3f00fb6e0"}
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.625089 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd9c6b56-wq84c" event={"ID":"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc","Type":"ContainerStarted","Data":"33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768"}
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.628699 4792 generic.go:334] "Generic (PLEG): container finished" podID="23845288-b122-49f0-b10d-641cfb94b66f" containerID="e157e75d292130bd4389d439006d4ea52a41ceebecf0771101d06500e2e20e69" exitCode=137
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.628758 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8494dffd6-7rx5p" event={"ID":"23845288-b122-49f0-b10d-641cfb94b66f","Type":"ContainerDied","Data":"e157e75d292130bd4389d439006d4ea52a41ceebecf0771101d06500e2e20e69"}
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.628789 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8494dffd6-7rx5p" event={"ID":"23845288-b122-49f0-b10d-641cfb94b66f","Type":"ContainerStarted","Data":"c6ef06043827b020305da62c029cb96e311543453b207384da400ffdbe2cc83c"}
Sep 29 19:17:13 crc kubenswrapper[4792]: W0929 19:17:13.794415 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3fb6db85_a6bd_485b_a235_ac35bd267ef5.slice/crio-918f1840e73cf575a65cc230d10c1e9a67668a22d39ec24c9bb85772ea9d559d WatchSource:0}: Error finding container 918f1840e73cf575a65cc230d10c1e9a67668a22d39ec24c9bb85772ea9d559d: Status 404 returned error can't find the container with id 918f1840e73cf575a65cc230d10c1e9a67668a22d39ec24c9bb85772ea9d559d
Sep 29 19:17:13 crc kubenswrapper[4792]: I0929 19:17:13.797557 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:14 crc kubenswrapper[4792]: I0929 19:17:14.638878 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3fb6db85-a6bd-485b-a235-ac35bd267ef5","Type":"ContainerStarted","Data":"bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a"}
Sep 29 19:17:14 crc kubenswrapper[4792]: I0929 19:17:14.639214 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3fb6db85-a6bd-485b-a235-ac35bd267ef5","Type":"ContainerStarted","Data":"918f1840e73cf575a65cc230d10c1e9a67668a22d39ec24c9bb85772ea9d559d"}
Sep 29 19:17:15 crc kubenswrapper[4792]: I0929 19:17:15.663883 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3fb6db85-a6bd-485b-a235-ac35bd267ef5","Type":"ContainerStarted","Data":"f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9"}
Sep 29 19:17:15 crc kubenswrapper[4792]: I0929 19:17:15.879671 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 29 19:17:15 crc kubenswrapper[4792]: I0929 19:17:15.879714 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 29 19:17:15 crc kubenswrapper[4792]: I0929 19:17:15.921596 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 29 19:17:15 crc kubenswrapper[4792]: I0929 19:17:15.936888 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 29 19:17:16 crc kubenswrapper[4792]: I0929 19:17:16.200487 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 29 19:17:16 crc kubenswrapper[4792]: I0929 19:17:16.200527 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 29 19:17:16 crc kubenswrapper[4792]: I0929 19:17:16.244795 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 29 19:17:16 crc kubenswrapper[4792]: I0929 19:17:16.268822 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 29 19:17:16 crc kubenswrapper[4792]: I0929 19:17:16.675372 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3fb6db85-a6bd-485b-a235-ac35bd267ef5","Type":"ContainerStarted","Data":"ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99"}
Sep 29 19:17:16 crc kubenswrapper[4792]: I0929 19:17:16.678880 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 29 19:17:16 crc kubenswrapper[4792]: I0929 19:17:16.678972 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 29 19:17:16 crc kubenswrapper[4792]: I0929 19:17:16.679033 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 29 19:17:16 crc kubenswrapper[4792]: I0929 19:17:16.679126 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 29 19:17:18 crc kubenswrapper[4792]: I0929 19:17:18.690417 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 19:17:18 crc kubenswrapper[4792]: I0929 19:17:18.691486 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 19:17:18 crc kubenswrapper[4792]: I0929 19:17:18.692403 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 19:17:18 crc kubenswrapper[4792]: I0929 19:17:18.692476 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 19:17:18 crc kubenswrapper[4792]: I0929 19:17:18.836301 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:19 crc kubenswrapper[4792]: I0929 19:17:19.723412 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3fb6db85-a6bd-485b-a235-ac35bd267ef5","Type":"ContainerStarted","Data":"6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab"}
Sep 29 19:17:19 crc kubenswrapper[4792]: I0929 19:17:19.723784 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="ceilometer-central-agent" containerID="cri-o://bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a" gracePeriod=30
Sep 29 19:17:19 crc kubenswrapper[4792]: I0929 19:17:19.724566 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 19:17:19 crc kubenswrapper[4792]: I0929 19:17:19.724824 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="sg-core" containerID="cri-o://ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99" gracePeriod=30
Sep 29 19:17:19 crc kubenswrapper[4792]: I0929 19:17:19.724881 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="ceilometer-notification-agent" containerID="cri-o://f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9" gracePeriod=30
Sep 29 19:17:19 crc kubenswrapper[4792]: I0929 19:17:19.724817 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="proxy-httpd" containerID="cri-o://6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab" gracePeriod=30
Sep 29 19:17:19 crc kubenswrapper[4792]: I0929 19:17:19.755946 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.1696439339999998 podStartE2EDuration="7.75592182s" podCreationTimestamp="2025-09-29 19:17:12 +0000 UTC" firstStartedPulling="2025-09-29 19:17:13.795616195 +0000 UTC m=+1245.788923591" lastFinishedPulling="2025-09-29 19:17:19.381894081 +0000 UTC m=+1251.375201477" observedRunningTime="2025-09-29 19:17:19.745409657 +0000 UTC m=+1251.738717053" watchObservedRunningTime="2025-09-29 19:17:19.75592182 +0000 UTC m=+1251.749229216"
Sep 29 19:17:20 crc kubenswrapper[4792]: I0929 19:17:20.037698 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 29 19:17:20 crc kubenswrapper[4792]: I0929 19:17:20.037822 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 19:17:20 crc kubenswrapper[4792]: I0929 19:17:20.050894 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 29 19:17:20 crc kubenswrapper[4792]: I0929 19:17:20.116275 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 29 19:17:20 crc kubenswrapper[4792]: I0929 19:17:20.116355 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 19:17:20 crc kubenswrapper[4792]: I0929 19:17:20.120512 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 29 19:17:20 crc kubenswrapper[4792]: I0929 19:17:20.736095 4792 generic.go:334] "Generic (PLEG): container finished" podID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerID="ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99" exitCode=2
Sep 29 19:17:20 crc kubenswrapper[4792]: I0929 19:17:20.736390 4792 generic.go:334] "Generic (PLEG): container finished" podID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerID="f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9" exitCode=0
Sep 29 19:17:20 crc kubenswrapper[4792]: I0929 19:17:20.736162 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3fb6db85-a6bd-485b-a235-ac35bd267ef5","Type":"ContainerDied","Data":"ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99"}
Sep 29 19:17:20 crc kubenswrapper[4792]: I0929 19:17:20.736725 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3fb6db85-a6bd-485b-a235-ac35bd267ef5","Type":"ContainerDied","Data":"f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9"}
Sep 29 19:17:22 crc kubenswrapper[4792]: I0929 19:17:22.732676 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Sep 29 19:17:22 crc kubenswrapper[4792]: I0929 19:17:22.962426 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-dfd9c6b56-wq84c"
Sep 29 19:17:22 crc kubenswrapper[4792]: I0929 19:17:22.963287 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-dfd9c6b56-wq84c"
Sep 29 19:17:23 crc kubenswrapper[4792]: I0929 19:17:23.078319 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8494dffd6-7rx5p"
Sep 29 19:17:23 crc kubenswrapper[4792]: I0929 19:17:23.078649 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8494dffd6-7rx5p"
Sep 29 19:17:23 crc kubenswrapper[4792]: I0929 19:17:23.080171 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8494dffd6-7rx5p" podUID="23845288-b122-49f0-b10d-641cfb94b66f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused"
Sep 29 19:17:23 crc kubenswrapper[4792]: I0929 19:17:23.940765 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-nb9z4"]
Sep 29 19:17:23 crc kubenswrapper[4792]: I0929 19:17:23.943341 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-nb9z4"
Sep 29 19:17:23 crc kubenswrapper[4792]: I0929 19:17:23.948409 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-nb9z4"]
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.042618 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-d6db5"]
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.043773 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-d6db5"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.057714 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-d6db5"]
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.109986 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwbcd\" (UniqueName: \"kubernetes.io/projected/0fe5fafb-0225-461a-9d77-b301c7459ec5-kube-api-access-gwbcd\") pod \"nova-api-db-create-nb9z4\" (UID: \"0fe5fafb-0225-461a-9d77-b301c7459ec5\") " pod="openstack/nova-api-db-create-nb9z4"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.128965 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-zsqnn"]
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.130167 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-zsqnn"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.144232 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-zsqnn"]
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.216427 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcgmz\" (UniqueName: \"kubernetes.io/projected/6205e51a-b274-4cb0-8e37-4d094352a317-kube-api-access-mcgmz\") pod \"nova-cell0-db-create-d6db5\" (UID: \"6205e51a-b274-4cb0-8e37-4d094352a317\") " pod="openstack/nova-cell0-db-create-d6db5"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.216899 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6xls\" (UniqueName: \"kubernetes.io/projected/a9c562dc-e14c-4288-92c0-c03095f3d7a1-kube-api-access-g6xls\") pod \"nova-cell1-db-create-zsqnn\" (UID: \"a9c562dc-e14c-4288-92c0-c03095f3d7a1\") " pod="openstack/nova-cell1-db-create-zsqnn"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.216943 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwbcd\" (UniqueName: \"kubernetes.io/projected/0fe5fafb-0225-461a-9d77-b301c7459ec5-kube-api-access-gwbcd\") pod \"nova-api-db-create-nb9z4\" (UID: \"0fe5fafb-0225-461a-9d77-b301c7459ec5\") " pod="openstack/nova-api-db-create-nb9z4"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.243061 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwbcd\" (UniqueName: \"kubernetes.io/projected/0fe5fafb-0225-461a-9d77-b301c7459ec5-kube-api-access-gwbcd\") pod \"nova-api-db-create-nb9z4\" (UID: \"0fe5fafb-0225-461a-9d77-b301c7459ec5\") " pod="openstack/nova-api-db-create-nb9z4"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.261505 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-nb9z4"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.318999 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6xls\" (UniqueName: \"kubernetes.io/projected/a9c562dc-e14c-4288-92c0-c03095f3d7a1-kube-api-access-g6xls\") pod \"nova-cell1-db-create-zsqnn\" (UID: \"a9c562dc-e14c-4288-92c0-c03095f3d7a1\") " pod="openstack/nova-cell1-db-create-zsqnn"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.319355 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcgmz\" (UniqueName: \"kubernetes.io/projected/6205e51a-b274-4cb0-8e37-4d094352a317-kube-api-access-mcgmz\") pod \"nova-cell0-db-create-d6db5\" (UID: \"6205e51a-b274-4cb0-8e37-4d094352a317\") " pod="openstack/nova-cell0-db-create-d6db5"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.349145 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcgmz\" (UniqueName: \"kubernetes.io/projected/6205e51a-b274-4cb0-8e37-4d094352a317-kube-api-access-mcgmz\") pod \"nova-cell0-db-create-d6db5\" (UID: \"6205e51a-b274-4cb0-8e37-4d094352a317\") " pod="openstack/nova-cell0-db-create-d6db5"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.358290 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-d6db5"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.358715 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6xls\" (UniqueName: \"kubernetes.io/projected/a9c562dc-e14c-4288-92c0-c03095f3d7a1-kube-api-access-g6xls\") pod \"nova-cell1-db-create-zsqnn\" (UID: \"a9c562dc-e14c-4288-92c0-c03095f3d7a1\") " pod="openstack/nova-cell1-db-create-zsqnn"
Sep 29 19:17:24 crc kubenswrapper[4792]: I0929 19:17:24.444984 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-zsqnn"
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.074002 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-nb9z4"]
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.148421 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-d6db5"]
Sep 29 19:17:25 crc kubenswrapper[4792]: W0929 19:17:25.150549 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6205e51a_b274_4cb0_8e37_4d094352a317.slice/crio-edfe545eaa58ae6ec076a9853623a91d8a920925c2b6b29d4681169292cdad34 WatchSource:0}: Error finding container edfe545eaa58ae6ec076a9853623a91d8a920925c2b6b29d4681169292cdad34: Status 404 returned error can't find the container with id edfe545eaa58ae6ec076a9853623a91d8a920925c2b6b29d4681169292cdad34
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.230694 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-zsqnn"]
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.787824 4792 generic.go:334] "Generic (PLEG): container finished" podID="a9c562dc-e14c-4288-92c0-c03095f3d7a1" containerID="757fa550617bf08b87a28c7008d19bcf691e256201ad1203a0fe098b6bc70946" exitCode=0
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.787974 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zsqnn" event={"ID":"a9c562dc-e14c-4288-92c0-c03095f3d7a1","Type":"ContainerDied","Data":"757fa550617bf08b87a28c7008d19bcf691e256201ad1203a0fe098b6bc70946"}
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.788133 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zsqnn" event={"ID":"a9c562dc-e14c-4288-92c0-c03095f3d7a1","Type":"ContainerStarted","Data":"34bc425325e4ee69016735fe8dd5a833873baea83440ab8bdf68256184a76f21"}
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.789763 4792 generic.go:334] "Generic (PLEG): container finished" podID="0fe5fafb-0225-461a-9d77-b301c7459ec5" containerID="6a0351fe58d5cc3e2344b58251acc870e52d5bae09ac185787b4c0b41e9a1874" exitCode=0
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.789816 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-nb9z4" event={"ID":"0fe5fafb-0225-461a-9d77-b301c7459ec5","Type":"ContainerDied","Data":"6a0351fe58d5cc3e2344b58251acc870e52d5bae09ac185787b4c0b41e9a1874"}
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.789833 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-nb9z4" event={"ID":"0fe5fafb-0225-461a-9d77-b301c7459ec5","Type":"ContainerStarted","Data":"6cb5fcbe671100fb406f277030a1edadf3e83c10eb34f01dc97a1197879deb0d"}
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.791196 4792 generic.go:334] "Generic (PLEG): container finished" podID="6205e51a-b274-4cb0-8e37-4d094352a317" containerID="f0d4f77c8a55c9b252bc37c845fad8b83b4218e93ef5b362d62aed78c07f838d" exitCode=0
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.791232 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-d6db5" event={"ID":"6205e51a-b274-4cb0-8e37-4d094352a317","Type":"ContainerDied","Data":"f0d4f77c8a55c9b252bc37c845fad8b83b4218e93ef5b362d62aed78c07f838d"}
Sep 29 19:17:25 crc kubenswrapper[4792]: I0929 19:17:25.791248 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-d6db5" event={"ID":"6205e51a-b274-4cb0-8e37-4d094352a317","Type":"ContainerStarted","Data":"edfe545eaa58ae6ec076a9853623a91d8a920925c2b6b29d4681169292cdad34"}
Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.263972 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-zsqnn"
Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.376050 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6xls\" (UniqueName: \"kubernetes.io/projected/a9c562dc-e14c-4288-92c0-c03095f3d7a1-kube-api-access-g6xls\") pod \"a9c562dc-e14c-4288-92c0-c03095f3d7a1\" (UID: \"a9c562dc-e14c-4288-92c0-c03095f3d7a1\") "
Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.398136 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9c562dc-e14c-4288-92c0-c03095f3d7a1-kube-api-access-g6xls" (OuterVolumeSpecName: "kube-api-access-g6xls") pod "a9c562dc-e14c-4288-92c0-c03095f3d7a1" (UID: "a9c562dc-e14c-4288-92c0-c03095f3d7a1"). InnerVolumeSpecName "kube-api-access-g6xls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.457935 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-d6db5"
Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.466353 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-nb9z4"
Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.483746 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6xls\" (UniqueName: \"kubernetes.io/projected/a9c562dc-e14c-4288-92c0-c03095f3d7a1-kube-api-access-g6xls\") on node \"crc\" DevicePath \"\""
Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.585417 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcgmz\" (UniqueName: \"kubernetes.io/projected/6205e51a-b274-4cb0-8e37-4d094352a317-kube-api-access-mcgmz\") pod \"6205e51a-b274-4cb0-8e37-4d094352a317\" (UID: \"6205e51a-b274-4cb0-8e37-4d094352a317\") "
Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.585511 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwbcd\" (UniqueName: \"kubernetes.io/projected/0fe5fafb-0225-461a-9d77-b301c7459ec5-kube-api-access-gwbcd\") pod \"0fe5fafb-0225-461a-9d77-b301c7459ec5\" (UID: \"0fe5fafb-0225-461a-9d77-b301c7459ec5\") "
Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.588909 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6205e51a-b274-4cb0-8e37-4d094352a317-kube-api-access-mcgmz" (OuterVolumeSpecName: "kube-api-access-mcgmz") pod "6205e51a-b274-4cb0-8e37-4d094352a317" (UID: "6205e51a-b274-4cb0-8e37-4d094352a317").
InnerVolumeSpecName "kube-api-access-mcgmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.594902 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fe5fafb-0225-461a-9d77-b301c7459ec5-kube-api-access-gwbcd" (OuterVolumeSpecName: "kube-api-access-gwbcd") pod "0fe5fafb-0225-461a-9d77-b301c7459ec5" (UID: "0fe5fafb-0225-461a-9d77-b301c7459ec5"). InnerVolumeSpecName "kube-api-access-gwbcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.687755 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwbcd\" (UniqueName: \"kubernetes.io/projected/0fe5fafb-0225-461a-9d77-b301c7459ec5-kube-api-access-gwbcd\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.687788 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcgmz\" (UniqueName: \"kubernetes.io/projected/6205e51a-b274-4cb0-8e37-4d094352a317-kube-api-access-mcgmz\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.809149 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-nb9z4" event={"ID":"0fe5fafb-0225-461a-9d77-b301c7459ec5","Type":"ContainerDied","Data":"6cb5fcbe671100fb406f277030a1edadf3e83c10eb34f01dc97a1197879deb0d"} Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.809189 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cb5fcbe671100fb406f277030a1edadf3e83c10eb34f01dc97a1197879deb0d" Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.809246 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-nb9z4" Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.830578 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-d6db5" event={"ID":"6205e51a-b274-4cb0-8e37-4d094352a317","Type":"ContainerDied","Data":"edfe545eaa58ae6ec076a9853623a91d8a920925c2b6b29d4681169292cdad34"} Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.830623 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edfe545eaa58ae6ec076a9853623a91d8a920925c2b6b29d4681169292cdad34" Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.830697 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-d6db5" Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.833118 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zsqnn" event={"ID":"a9c562dc-e14c-4288-92c0-c03095f3d7a1","Type":"ContainerDied","Data":"34bc425325e4ee69016735fe8dd5a833873baea83440ab8bdf68256184a76f21"} Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.833154 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34bc425325e4ee69016735fe8dd5a833873baea83440ab8bdf68256184a76f21" Sep 29 19:17:27 crc kubenswrapper[4792]: I0929 19:17:27.833214 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-zsqnn" Sep 29 19:17:28 crc kubenswrapper[4792]: I0929 19:17:28.845410 4792 generic.go:334] "Generic (PLEG): container finished" podID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerID="bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a" exitCode=0 Sep 29 19:17:28 crc kubenswrapper[4792]: I0929 19:17:28.845460 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3fb6db85-a6bd-485b-a235-ac35bd267ef5","Type":"ContainerDied","Data":"bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a"} Sep 29 19:17:32 crc kubenswrapper[4792]: I0929 19:17:32.964593 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-dfd9c6b56-wq84c" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.080114 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8494dffd6-7rx5p" podUID="23845288-b122-49f0-b10d-641cfb94b66f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.975726 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-8647-account-create-j4tgd"] Sep 29 19:17:33 crc kubenswrapper[4792]: E0929 19:17:33.976216 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fe5fafb-0225-461a-9d77-b301c7459ec5" containerName="mariadb-database-create" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.976232 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fe5fafb-0225-461a-9d77-b301c7459ec5" containerName="mariadb-database-create" Sep 29 19:17:33 crc kubenswrapper[4792]: E0929 19:17:33.976252 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6205e51a-b274-4cb0-8e37-4d094352a317" containerName="mariadb-database-create" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.976260 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6205e51a-b274-4cb0-8e37-4d094352a317" containerName="mariadb-database-create" Sep 29 19:17:33 crc kubenswrapper[4792]: E0929 19:17:33.976288 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9c562dc-e14c-4288-92c0-c03095f3d7a1" containerName="mariadb-database-create" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.976298 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9c562dc-e14c-4288-92c0-c03095f3d7a1" containerName="mariadb-database-create" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.976513 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fe5fafb-0225-461a-9d77-b301c7459ec5" containerName="mariadb-database-create" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.976528 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9c562dc-e14c-4288-92c0-c03095f3d7a1" containerName="mariadb-database-create" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.976546 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6205e51a-b274-4cb0-8e37-4d094352a317" containerName="mariadb-database-create" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.977366 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-8647-account-create-j4tgd" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.980742 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 29 19:17:33 crc kubenswrapper[4792]: I0929 19:17:33.997106 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8647-account-create-j4tgd"] Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.073130 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-e097-account-create-qvhth"] Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.074277 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e097-account-create-qvhth" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.077197 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.090434 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e097-account-create-qvhth"] Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.106422 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krr4t\" (UniqueName: \"kubernetes.io/projected/c4df94be-b881-4404-a0e4-6f02a72ff60d-kube-api-access-krr4t\") pod \"nova-api-8647-account-create-j4tgd\" (UID: \"c4df94be-b881-4404-a0e4-6f02a72ff60d\") " pod="openstack/nova-api-8647-account-create-j4tgd" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.208351 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t94p\" (UniqueName: \"kubernetes.io/projected/e485ebed-017b-48d0-bb72-24571bb0ba2e-kube-api-access-4t94p\") pod \"nova-cell0-e097-account-create-qvhth\" (UID: \"e485ebed-017b-48d0-bb72-24571bb0ba2e\") " pod="openstack/nova-cell0-e097-account-create-qvhth" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.208694 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krr4t\" (UniqueName: \"kubernetes.io/projected/c4df94be-b881-4404-a0e4-6f02a72ff60d-kube-api-access-krr4t\") pod \"nova-api-8647-account-create-j4tgd\" (UID: \"c4df94be-b881-4404-a0e4-6f02a72ff60d\") " pod="openstack/nova-api-8647-account-create-j4tgd" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.227673 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krr4t\" (UniqueName: \"kubernetes.io/projected/c4df94be-b881-4404-a0e4-6f02a72ff60d-kube-api-access-krr4t\") pod \"nova-api-8647-account-create-j4tgd\" (UID: \"c4df94be-b881-4404-a0e4-6f02a72ff60d\") " pod="openstack/nova-api-8647-account-create-j4tgd" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.278326 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db58-account-create-5gg6m"] Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.279793 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db58-account-create-5gg6m" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.283443 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.287446 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db58-account-create-5gg6m"] Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.311225 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t94p\" (UniqueName: \"kubernetes.io/projected/e485ebed-017b-48d0-bb72-24571bb0ba2e-kube-api-access-4t94p\") pod \"nova-cell0-e097-account-create-qvhth\" (UID: \"e485ebed-017b-48d0-bb72-24571bb0ba2e\") " pod="openstack/nova-cell0-e097-account-create-qvhth" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.322736 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8647-account-create-j4tgd" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.340556 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t94p\" (UniqueName: \"kubernetes.io/projected/e485ebed-017b-48d0-bb72-24571bb0ba2e-kube-api-access-4t94p\") pod \"nova-cell0-e097-account-create-qvhth\" (UID: \"e485ebed-017b-48d0-bb72-24571bb0ba2e\") " pod="openstack/nova-cell0-e097-account-create-qvhth" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.389731 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e097-account-create-qvhth" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.412515 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2t5c\" (UniqueName: \"kubernetes.io/projected/bc7f0299-1915-487b-8cf3-fb9215143c3e-kube-api-access-l2t5c\") pod \"nova-cell1-db58-account-create-5gg6m\" (UID: \"bc7f0299-1915-487b-8cf3-fb9215143c3e\") " pod="openstack/nova-cell1-db58-account-create-5gg6m" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.514195 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2t5c\" (UniqueName: \"kubernetes.io/projected/bc7f0299-1915-487b-8cf3-fb9215143c3e-kube-api-access-l2t5c\") pod \"nova-cell1-db58-account-create-5gg6m\" (UID: \"bc7f0299-1915-487b-8cf3-fb9215143c3e\") " pod="openstack/nova-cell1-db58-account-create-5gg6m" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.545306 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2t5c\" (UniqueName: \"kubernetes.io/projected/bc7f0299-1915-487b-8cf3-fb9215143c3e-kube-api-access-l2t5c\") pod \"nova-cell1-db58-account-create-5gg6m\" (UID: \"bc7f0299-1915-487b-8cf3-fb9215143c3e\") " pod="openstack/nova-cell1-db58-account-create-5gg6m" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.597329 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db58-account-create-5gg6m" Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.846722 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8647-account-create-j4tgd"] Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.902675 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8647-account-create-j4tgd" event={"ID":"c4df94be-b881-4404-a0e4-6f02a72ff60d","Type":"ContainerStarted","Data":"bdb823d37fa30cb6559191636c6a2285cb2cfcb667381527581b54f9e56eddb0"} Sep 29 19:17:34 crc kubenswrapper[4792]: I0929 19:17:34.917894 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e097-account-create-qvhth"] Sep 29 19:17:35 crc kubenswrapper[4792]: I0929 19:17:35.080421 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db58-account-create-5gg6m"] Sep 29 19:17:35 crc kubenswrapper[4792]: I0929 19:17:35.915451 4792 generic.go:334] "Generic (PLEG): container finished" podID="bc7f0299-1915-487b-8cf3-fb9215143c3e" containerID="22dd093354f323ca6a47071b3b9926c7c358e28bf7d19edd123f99a117a26596" exitCode=0 Sep 29 19:17:35 crc kubenswrapper[4792]: I0929 19:17:35.917064 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db58-account-create-5gg6m" event={"ID":"bc7f0299-1915-487b-8cf3-fb9215143c3e","Type":"ContainerDied","Data":"22dd093354f323ca6a47071b3b9926c7c358e28bf7d19edd123f99a117a26596"} Sep 29 19:17:35 crc kubenswrapper[4792]: I0929 19:17:35.917219 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db58-account-create-5gg6m" event={"ID":"bc7f0299-1915-487b-8cf3-fb9215143c3e","Type":"ContainerStarted","Data":"3264538dc15865b20c7b7815889ef63cb486dc5b6abaf1aa6af380dbc1f6f50a"} Sep 29 19:17:35 crc kubenswrapper[4792]: I0929 19:17:35.922355 4792 generic.go:334] "Generic (PLEG): container finished" podID="c4df94be-b881-4404-a0e4-6f02a72ff60d" containerID="3a0687b6f4e62cf46bfd15ea6f7a91fadb4104a6dc851ae3bed5ebebff951b41" exitCode=0 Sep 29 19:17:35 crc kubenswrapper[4792]: I0929 19:17:35.922431 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8647-account-create-j4tgd" event={"ID":"c4df94be-b881-4404-a0e4-6f02a72ff60d","Type":"ContainerDied","Data":"3a0687b6f4e62cf46bfd15ea6f7a91fadb4104a6dc851ae3bed5ebebff951b41"} Sep 29 19:17:35 crc kubenswrapper[4792]: I0929 19:17:35.924236 4792 generic.go:334] "Generic (PLEG): container finished" podID="e485ebed-017b-48d0-bb72-24571bb0ba2e" containerID="31bc47d37f16098e5e073a6f949770c00105b4895e34cd4ab1ea5f3bf0a9734a" exitCode=0 Sep 29 19:17:35 crc kubenswrapper[4792]: I0929 19:17:35.924281 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e097-account-create-qvhth" event={"ID":"e485ebed-017b-48d0-bb72-24571bb0ba2e","Type":"ContainerDied","Data":"31bc47d37f16098e5e073a6f949770c00105b4895e34cd4ab1ea5f3bf0a9734a"} Sep 29 19:17:35 crc kubenswrapper[4792]: I0929 19:17:35.924304 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e097-account-create-qvhth" event={"ID":"e485ebed-017b-48d0-bb72-24571bb0ba2e","Type":"ContainerStarted","Data":"88efee2be746a9990e47a9ec5ff825e298bb243d67f26894a3bf5b33e989d00e"} Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.400178 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db58-account-create-5gg6m" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.515078 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2t5c\" (UniqueName: \"kubernetes.io/projected/bc7f0299-1915-487b-8cf3-fb9215143c3e-kube-api-access-l2t5c\") pod \"bc7f0299-1915-487b-8cf3-fb9215143c3e\" (UID: \"bc7f0299-1915-487b-8cf3-fb9215143c3e\") " Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.539117 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc7f0299-1915-487b-8cf3-fb9215143c3e-kube-api-access-l2t5c" (OuterVolumeSpecName: "kube-api-access-l2t5c") pod "bc7f0299-1915-487b-8cf3-fb9215143c3e" (UID: "bc7f0299-1915-487b-8cf3-fb9215143c3e"). InnerVolumeSpecName "kube-api-access-l2t5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.610756 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8647-account-create-j4tgd" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.616919 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e097-account-create-qvhth" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.617182 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2t5c\" (UniqueName: \"kubernetes.io/projected/bc7f0299-1915-487b-8cf3-fb9215143c3e-kube-api-access-l2t5c\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.718280 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krr4t\" (UniqueName: \"kubernetes.io/projected/c4df94be-b881-4404-a0e4-6f02a72ff60d-kube-api-access-krr4t\") pod \"c4df94be-b881-4404-a0e4-6f02a72ff60d\" (UID: \"c4df94be-b881-4404-a0e4-6f02a72ff60d\") " Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.718381 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t94p\" (UniqueName: \"kubernetes.io/projected/e485ebed-017b-48d0-bb72-24571bb0ba2e-kube-api-access-4t94p\") pod \"e485ebed-017b-48d0-bb72-24571bb0ba2e\" (UID: \"e485ebed-017b-48d0-bb72-24571bb0ba2e\") " Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.721651 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e485ebed-017b-48d0-bb72-24571bb0ba2e-kube-api-access-4t94p" (OuterVolumeSpecName: "kube-api-access-4t94p") pod "e485ebed-017b-48d0-bb72-24571bb0ba2e" (UID: "e485ebed-017b-48d0-bb72-24571bb0ba2e"). InnerVolumeSpecName "kube-api-access-4t94p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.721892 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4df94be-b881-4404-a0e4-6f02a72ff60d-kube-api-access-krr4t" (OuterVolumeSpecName: "kube-api-access-krr4t") pod "c4df94be-b881-4404-a0e4-6f02a72ff60d" (UID: "c4df94be-b881-4404-a0e4-6f02a72ff60d"). InnerVolumeSpecName "kube-api-access-krr4t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.820069 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krr4t\" (UniqueName: \"kubernetes.io/projected/c4df94be-b881-4404-a0e4-6f02a72ff60d-kube-api-access-krr4t\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.820370 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t94p\" (UniqueName: \"kubernetes.io/projected/e485ebed-017b-48d0-bb72-24571bb0ba2e-kube-api-access-4t94p\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.948488 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e097-account-create-qvhth" event={"ID":"e485ebed-017b-48d0-bb72-24571bb0ba2e","Type":"ContainerDied","Data":"88efee2be746a9990e47a9ec5ff825e298bb243d67f26894a3bf5b33e989d00e"} Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.948528 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88efee2be746a9990e47a9ec5ff825e298bb243d67f26894a3bf5b33e989d00e" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.949087 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e097-account-create-qvhth" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.950264 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db58-account-create-5gg6m" event={"ID":"bc7f0299-1915-487b-8cf3-fb9215143c3e","Type":"ContainerDied","Data":"3264538dc15865b20c7b7815889ef63cb486dc5b6abaf1aa6af380dbc1f6f50a"} Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.950289 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3264538dc15865b20c7b7815889ef63cb486dc5b6abaf1aa6af380dbc1f6f50a" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.950341 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db58-account-create-5gg6m" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.997052 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8647-account-create-j4tgd" event={"ID":"c4df94be-b881-4404-a0e4-6f02a72ff60d","Type":"ContainerDied","Data":"bdb823d37fa30cb6559191636c6a2285cb2cfcb667381527581b54f9e56eddb0"} Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.997104 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bdb823d37fa30cb6559191636c6a2285cb2cfcb667381527581b54f9e56eddb0" Sep 29 19:17:37 crc kubenswrapper[4792]: I0929 19:17:37.997161 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-8647-account-create-j4tgd" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.327938 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k2c4z"] Sep 29 19:17:39 crc kubenswrapper[4792]: E0929 19:17:39.328630 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc7f0299-1915-487b-8cf3-fb9215143c3e" containerName="mariadb-account-create" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.328642 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc7f0299-1915-487b-8cf3-fb9215143c3e" containerName="mariadb-account-create" Sep 29 19:17:39 crc kubenswrapper[4792]: E0929 19:17:39.328659 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e485ebed-017b-48d0-bb72-24571bb0ba2e" containerName="mariadb-account-create" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.328665 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e485ebed-017b-48d0-bb72-24571bb0ba2e" containerName="mariadb-account-create" Sep 29 19:17:39 crc kubenswrapper[4792]: E0929 19:17:39.328698 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4df94be-b881-4404-a0e4-6f02a72ff60d" containerName="mariadb-account-create" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.328704 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4df94be-b881-4404-a0e4-6f02a72ff60d" containerName="mariadb-account-create" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.328897 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4df94be-b881-4404-a0e4-6f02a72ff60d" containerName="mariadb-account-create" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.328910 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e485ebed-017b-48d0-bb72-24571bb0ba2e" containerName="mariadb-account-create" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.328922 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc7f0299-1915-487b-8cf3-fb9215143c3e" containerName="mariadb-account-create" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.329665 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.332938 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-xxzkx" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.333589 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.333794 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.337722 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k2c4z"] Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.464107 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4zhp\" (UniqueName: \"kubernetes.io/projected/b3ee2364-5037-46f9-88b2-abb515fdc1b0-kube-api-access-v4zhp\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.464396 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.464654 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-scripts\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.464803 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-config-data\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.566897 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4zhp\" (UniqueName: \"kubernetes.io/projected/b3ee2364-5037-46f9-88b2-abb515fdc1b0-kube-api-access-v4zhp\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.567464 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.568581 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-scripts\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: 
\"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.568728 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-config-data\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.584808 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-scripts\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.584837 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.585028 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-config-data\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.596441 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4zhp\" (UniqueName: \"kubernetes.io/projected/b3ee2364-5037-46f9-88b2-abb515fdc1b0-kube-api-access-v4zhp\") pod \"nova-cell0-conductor-db-sync-k2c4z\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:39 crc kubenswrapper[4792]: I0929 19:17:39.664479 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:17:40 crc kubenswrapper[4792]: I0929 19:17:40.034947 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k2c4z"] Sep 29 19:17:41 crc kubenswrapper[4792]: I0929 19:17:41.030518 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k2c4z" event={"ID":"b3ee2364-5037-46f9-88b2-abb515fdc1b0","Type":"ContainerStarted","Data":"04568a5d1d77f8fef19f4eb505e8db8c38393a4e4969986f35539db0a61efa46"} Sep 29 19:17:41 crc kubenswrapper[4792]: I0929 19:17:41.959953 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:17:41 crc kubenswrapper[4792]: I0929 19:17:41.960011 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:17:41 crc kubenswrapper[4792]: I0929 19:17:41.960046 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:17:41 crc kubenswrapper[4792]: I0929 19:17:41.960640 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e8bc360625c05ed5b39b0bdabe37934fb480a91515b533db0262f5a58fa6cf95"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:17:41 crc kubenswrapper[4792]: I0929 19:17:41.960692 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://e8bc360625c05ed5b39b0bdabe37934fb480a91515b533db0262f5a58fa6cf95" gracePeriod=600 Sep 29 19:17:43 crc kubenswrapper[4792]: I0929 19:17:43.067811 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="e8bc360625c05ed5b39b0bdabe37934fb480a91515b533db0262f5a58fa6cf95" exitCode=0 Sep 29 19:17:43 crc kubenswrapper[4792]: I0929 19:17:43.067898 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"e8bc360625c05ed5b39b0bdabe37934fb480a91515b533db0262f5a58fa6cf95"} Sep 29 19:17:43 crc kubenswrapper[4792]: I0929 19:17:43.069174 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"4da2f3e8ceb0a8e1e559272a172c2d5b11ff46e91e3ba55c40264756f850c284"} Sep 29 19:17:43 crc kubenswrapper[4792]: I0929 19:17:43.069271 4792 scope.go:117] "RemoveContainer" containerID="5fe4636f526132681f79866adf93cfab5bd3a4171ad63c289794ff569221d1f4" Sep 29 19:17:43 crc kubenswrapper[4792]: I0929 19:17:43.319402 4792 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openstack/ceilometer-0" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 29 19:17:45 crc kubenswrapper[4792]: I0929 19:17:45.862100 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:17:45 crc kubenswrapper[4792]: I0929 19:17:45.891706 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:17:47 crc kubenswrapper[4792]: I0929 19:17:47.873731 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:17:47 crc kubenswrapper[4792]: I0929 19:17:47.981875 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-8494dffd6-7rx5p" Sep 29 19:17:48 crc kubenswrapper[4792]: I0929 19:17:48.065198 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-dfd9c6b56-wq84c"] Sep 29 19:17:48 crc kubenswrapper[4792]: I0929 19:17:48.135736 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k2c4z" event={"ID":"b3ee2364-5037-46f9-88b2-abb515fdc1b0","Type":"ContainerStarted","Data":"92fc46631ef693a3f819c878eda51eb6a3b4a7dbfc255d6e415ea9ff92df2819"} Sep 29 19:17:48 crc kubenswrapper[4792]: I0929 19:17:48.146250 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-dfd9c6b56-wq84c" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon-log" containerID="cri-o://44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046" gracePeriod=30 Sep 29 19:17:48 crc kubenswrapper[4792]: I0929 19:17:48.146485 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-dfd9c6b56-wq84c" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon" containerID="cri-o://33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768" gracePeriod=30 Sep 29 19:17:48 crc kubenswrapper[4792]: I0929 19:17:48.156922 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-k2c4z" podStartSLOduration=1.864393435 podStartE2EDuration="9.156887278s" podCreationTimestamp="2025-09-29 19:17:39 +0000 UTC" firstStartedPulling="2025-09-29 19:17:40.041596068 +0000 UTC m=+1272.034903464" lastFinishedPulling="2025-09-29 19:17:47.334089911 +0000 UTC m=+1279.327397307" observedRunningTime="2025-09-29 19:17:48.155630855 +0000 UTC m=+1280.148938251" watchObservedRunningTime="2025-09-29 19:17:48.156887278 +0000 UTC m=+1280.150194674" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.099259 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.163526 4792 generic.go:334] "Generic (PLEG): container finished" podID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerID="6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab" exitCode=137 Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.163571 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3fb6db85-a6bd-485b-a235-ac35bd267ef5","Type":"ContainerDied","Data":"6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab"} Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.163607 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3fb6db85-a6bd-485b-a235-ac35bd267ef5","Type":"ContainerDied","Data":"918f1840e73cf575a65cc230d10c1e9a67668a22d39ec24c9bb85772ea9d559d"} Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.163629 4792 scope.go:117] "RemoveContainer" containerID="6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.163776 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.164934 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-combined-ca-bundle\") pod \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.164980 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-config-data\") pod \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.165021 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-log-httpd\") pod \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.165049 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-sg-core-conf-yaml\") pod \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.165118 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hds6s\" (UniqueName: \"kubernetes.io/projected/3fb6db85-a6bd-485b-a235-ac35bd267ef5-kube-api-access-hds6s\") pod \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.165207 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-run-httpd\") pod \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.165229 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-scripts\") pod \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\" (UID: \"3fb6db85-a6bd-485b-a235-ac35bd267ef5\") " Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.166261 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3fb6db85-a6bd-485b-a235-ac35bd267ef5" (UID: "3fb6db85-a6bd-485b-a235-ac35bd267ef5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.166602 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3fb6db85-a6bd-485b-a235-ac35bd267ef5" (UID: "3fb6db85-a6bd-485b-a235-ac35bd267ef5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.168118 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.168150 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3fb6db85-a6bd-485b-a235-ac35bd267ef5-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.171403 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fb6db85-a6bd-485b-a235-ac35bd267ef5-kube-api-access-hds6s" (OuterVolumeSpecName: "kube-api-access-hds6s") pod "3fb6db85-a6bd-485b-a235-ac35bd267ef5" (UID: "3fb6db85-a6bd-485b-a235-ac35bd267ef5"). InnerVolumeSpecName "kube-api-access-hds6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.173010 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-scripts" (OuterVolumeSpecName: "scripts") pod "3fb6db85-a6bd-485b-a235-ac35bd267ef5" (UID: "3fb6db85-a6bd-485b-a235-ac35bd267ef5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.200350 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3fb6db85-a6bd-485b-a235-ac35bd267ef5" (UID: "3fb6db85-a6bd-485b-a235-ac35bd267ef5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.246118 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3fb6db85-a6bd-485b-a235-ac35bd267ef5" (UID: "3fb6db85-a6bd-485b-a235-ac35bd267ef5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.269539 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.269863 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hds6s\" (UniqueName: \"kubernetes.io/projected/3fb6db85-a6bd-485b-a235-ac35bd267ef5-kube-api-access-hds6s\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.269974 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.270044 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.270545 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-config-data" (OuterVolumeSpecName: "config-data") pod "3fb6db85-a6bd-485b-a235-ac35bd267ef5" (UID: "3fb6db85-a6bd-485b-a235-ac35bd267ef5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.276060 4792 scope.go:117] "RemoveContainer" containerID="ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.293784 4792 scope.go:117] "RemoveContainer" containerID="f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.313366 4792 scope.go:117] "RemoveContainer" containerID="bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.331524 4792 scope.go:117] "RemoveContainer" containerID="6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab" Sep 29 19:17:50 crc kubenswrapper[4792]: E0929 19:17:50.331877 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab\": container with ID starting with 6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab not found: ID does not exist" containerID="6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.331918 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab"} err="failed to get container status \"6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab\": rpc error: code = NotFound desc = could not find container \"6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab\": container with ID starting with 6751b4a9361e85878ad03e18cad2a00fdf96cfb86a09f5026c14552aa6e567ab not found: ID does not exist" Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.331943 4792 scope.go:117] "RemoveContainer" containerID="ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99" Sep 29 19:17:50 crc kubenswrapper[4792]: 
E0929 19:17:50.332233 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99\": container with ID starting with ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99 not found: ID does not exist" containerID="ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.332261 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99"} err="failed to get container status \"ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99\": rpc error: code = NotFound desc = could not find container \"ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99\": container with ID starting with ad9d7daabe2b8aec6b8fbb142b7fd3990380503eb15d9da89814be6ff90eed99 not found: ID does not exist"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.332281 4792 scope.go:117] "RemoveContainer" containerID="f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9"
Sep 29 19:17:50 crc kubenswrapper[4792]: E0929 19:17:50.332618 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9\": container with ID starting with f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9 not found: ID does not exist" containerID="f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.332639 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9"} err="failed to get container status \"f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9\": rpc error: code = NotFound desc = could not find container \"f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9\": container with ID starting with f2d02644a37545ae9e9d303a10ec5c1c6fde03ca03b630f4c884c6d57e55e3e9 not found: ID does not exist"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.332651 4792 scope.go:117] "RemoveContainer" containerID="bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a"
Sep 29 19:17:50 crc kubenswrapper[4792]: E0929 19:17:50.332818 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a\": container with ID starting with bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a not found: ID does not exist" containerID="bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.332838 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a"} err="failed to get container status \"bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a\": rpc error: code = NotFound desc = could not find container \"bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a\": container with ID starting with bded3e114782abceefa48b41c96909285dd22b518ab768f153d3c92226879e4a not found: ID does not exist"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.371497 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fb6db85-a6bd-485b-a235-ac35bd267ef5-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.498273 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.509837 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.529592 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:50 crc kubenswrapper[4792]: E0929 19:17:50.529969 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="proxy-httpd"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.529984 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="proxy-httpd"
Sep 29 19:17:50 crc kubenswrapper[4792]: E0929 19:17:50.529997 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="ceilometer-central-agent"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.530002 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="ceilometer-central-agent"
Sep 29 19:17:50 crc kubenswrapper[4792]: E0929 19:17:50.530010 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="sg-core"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.530016 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="sg-core"
Sep 29 19:17:50 crc kubenswrapper[4792]: E0929 19:17:50.530031 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="ceilometer-notification-agent"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.530037 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="ceilometer-notification-agent"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.530201 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="proxy-httpd"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.530219 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="ceilometer-central-agent"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.530229 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="ceilometer-notification-agent"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.530243 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" containerName="sg-core"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.532021 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
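The NotFound errors in the block above are benign: the kubelet retries RemoveContainer for container IDs that CRI-O has already deleted, and an "ID does not exist" answer simply means the work is done. A minimal Go sketch of that idempotent-delete pattern, assuming a hypothetical runtimeClient interface standing in for the CRI runtime service (illustrative only, not kubelet internals):

    package main

    import (
        "errors"
        "fmt"
    )

    // errNotFound stands in for the CRI "ID does not exist" condition logged above.
    var errNotFound = errors.New("ID does not exist")

    // runtimeClient is a hypothetical stand-in for the CRI runtime service.
    type runtimeClient interface {
        RemoveContainer(id string) error
    }

    // goneRuntime simulates a runtime that has already deleted the container.
    type goneRuntime struct{}

    func (goneRuntime) RemoveContainer(id string) error { return errNotFound }

    // deleteContainer treats NotFound on removal as success: the container
    // is already gone, so there is nothing left to do.
    func deleteContainer(rc runtimeClient, id string) error {
        if err := rc.RemoveContainer(id); err != nil {
            if errors.Is(err, errNotFound) {
                return nil // already deleted
            }
            return fmt.Errorf("remove %s: %w", id, err)
        }
        return nil
    }

    func main() {
        err := deleteContainer(goneRuntime{}, "ad9d7daabe2b")
        fmt.Println("delete result:", err) // prints: delete result: <nil>
    }

Under this reading, the paired "ContainerStatus from runtime service failed" / "DeleteContainer returned error" records are cleanup noise from a pod that was already torn down, not a fault.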
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.534177 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.534380 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.546350 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.676504 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-scripts\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.676917 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.676942 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-run-httpd\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.676997 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-config-data\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.677071 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.677190 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qt25\" (UniqueName: \"kubernetes.io/projected/e1223f15-a51f-4a79-900d-aedf61ea6515-kube-api-access-5qt25\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.677370 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-log-httpd\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.779160 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-scripts\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.779230 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.779260 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-run-httpd\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.779303 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-config-data\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.779334 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.779362 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qt25\" (UniqueName: \"kubernetes.io/projected/e1223f15-a51f-4a79-900d-aedf61ea6515-kube-api-access-5qt25\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.779427 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-log-httpd\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.779878 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-log-httpd\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.780131 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-run-httpd\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.784487 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-config-data\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.784805 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.784823 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-scripts\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.788312 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.797977 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qt25\" (UniqueName: \"kubernetes.io/projected/e1223f15-a51f-4a79-900d-aedf61ea6515-kube-api-access-5qt25\") pod \"ceilometer-0\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") " pod="openstack/ceilometer-0"
Sep 29 19:17:50 crc kubenswrapper[4792]: I0929 19:17:50.873879 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:17:51 crc kubenswrapper[4792]: I0929 19:17:51.030298 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fb6db85-a6bd-485b-a235-ac35bd267ef5" path="/var/lib/kubelet/pods/3fb6db85-a6bd-485b-a235-ac35bd267ef5/volumes"
Sep 29 19:17:51 crc kubenswrapper[4792]: W0929 19:17:51.356812 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1223f15_a51f_4a79_900d_aedf61ea6515.slice/crio-21bae07bc1703bef01a03dfc6b78b89d222286b14cebd733d9c684e812c16d6f WatchSource:0}: Error finding container 21bae07bc1703bef01a03dfc6b78b89d222286b14cebd733d9c684e812c16d6f: Status 404 returned error can't find the container with id 21bae07bc1703bef01a03dfc6b78b89d222286b14cebd733d9c684e812c16d6f
Sep 29 19:17:51 crc kubenswrapper[4792]: I0929 19:17:51.357588 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:17:52 crc kubenswrapper[4792]: I0929 19:17:52.187939 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1223f15-a51f-4a79-900d-aedf61ea6515","Type":"ContainerStarted","Data":"fead914b9dae4990de78949f0c6927f2fbc892b74b5bcc0c10290c7c6ab3fc67"}
Sep 29 19:17:52 crc kubenswrapper[4792]: I0929 19:17:52.188214 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1223f15-a51f-4a79-900d-aedf61ea6515","Type":"ContainerStarted","Data":"21bae07bc1703bef01a03dfc6b78b89d222286b14cebd733d9c684e812c16d6f"}
Sep 29 19:17:52 crc kubenswrapper[4792]: I0929 19:17:52.189976 4792 generic.go:334] "Generic (PLEG): container finished" podID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerID="33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768" exitCode=0
Sep 29 19:17:52 crc kubenswrapper[4792]: I0929 19:17:52.190137 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd9c6b56-wq84c" event={"ID":"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc","Type":"ContainerDied","Data":"33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768"}
Sep 29 19:17:52 crc kubenswrapper[4792]: I0929 19:17:52.190354 4792 scope.go:117] "RemoveContainer" containerID="30df7ac6c56065d05590b40fa8b60c11ef56e2f8dbc338a1dc4730d3f00fb6e0"
Sep 29 19:17:52 crc kubenswrapper[4792]: I0929 19:17:52.963199 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-dfd9c6b56-wq84c" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused"
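The horizon readiness failure above (and its repeats below) is a plain TCP "connection refused", which can be reproduced outside the kubelet. A small Go sketch that performs the same HTTPS GET the prober logs; the URL is taken verbatim from the probe output, and the skip-verify TLS config is an assumption for a cluster-internal, self-signed endpoint:

    package main

    import (
        "crypto/tls"
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        client := &http.Client{
            Timeout: 5 * time.Second,
            Transport: &http.Transport{
                // The dashboard serves a cluster-internal certificate, so
                // verification is skipped for this ad-hoc check.
                TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
            },
        }
        resp, err := client.Get("https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/")
        if err != nil {
            // A "connect: connection refused" here matches the prober output:
            // nothing is listening on 10.217.0.150:8443 yet.
            fmt.Println("probe failed:", err)
            return
        }
        defer resp.Body.Close()
        fmt.Println("probe status:", resp.Status)
    }

Run from the node while horizon restarts, this would fail exactly as logged until the container is listening again.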
podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Sep 29 19:17:53 crc kubenswrapper[4792]: I0929 19:17:53.202678 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1223f15-a51f-4a79-900d-aedf61ea6515","Type":"ContainerStarted","Data":"78be82762986ee2f2337a65f517e83f970a822199623e58218aeede506872bc6"} Sep 29 19:17:54 crc kubenswrapper[4792]: I0929 19:17:54.214038 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1223f15-a51f-4a79-900d-aedf61ea6515","Type":"ContainerStarted","Data":"4f9961a72884934fb318d07a9cadf41e25b23e1fa38d13d1a16a1f2ed652c083"} Sep 29 19:17:55 crc kubenswrapper[4792]: I0929 19:17:55.224311 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1223f15-a51f-4a79-900d-aedf61ea6515","Type":"ContainerStarted","Data":"37803a381b2e7c8b62b128f03c144e6b74af030cec806470f0595aa5eea9807a"} Sep 29 19:17:55 crc kubenswrapper[4792]: I0929 19:17:55.225275 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 19:17:55 crc kubenswrapper[4792]: I0929 19:17:55.252539 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.746737105 podStartE2EDuration="5.252523677s" podCreationTimestamp="2025-09-29 19:17:50 +0000 UTC" firstStartedPulling="2025-09-29 19:17:51.409083772 +0000 UTC m=+1283.402391168" lastFinishedPulling="2025-09-29 19:17:54.914870344 +0000 UTC m=+1286.908177740" observedRunningTime="2025-09-29 19:17:55.250277438 +0000 UTC m=+1287.243584844" watchObservedRunningTime="2025-09-29 19:17:55.252523677 +0000 UTC m=+1287.245831073" Sep 29 19:17:59 crc kubenswrapper[4792]: I0929 19:17:59.265393 4792 generic.go:334] "Generic (PLEG): container finished" podID="b3ee2364-5037-46f9-88b2-abb515fdc1b0" containerID="92fc46631ef693a3f819c878eda51eb6a3b4a7dbfc255d6e415ea9ff92df2819" exitCode=0 Sep 29 19:17:59 crc kubenswrapper[4792]: I0929 19:17:59.265493 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k2c4z" event={"ID":"b3ee2364-5037-46f9-88b2-abb515fdc1b0","Type":"ContainerDied","Data":"92fc46631ef693a3f819c878eda51eb6a3b4a7dbfc255d6e415ea9ff92df2819"} Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.648379 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.766471 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4zhp\" (UniqueName: \"kubernetes.io/projected/b3ee2364-5037-46f9-88b2-abb515fdc1b0-kube-api-access-v4zhp\") pod \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.766641 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-combined-ca-bundle\") pod \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.766697 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-scripts\") pod \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.767266 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-config-data\") pod \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\" (UID: \"b3ee2364-5037-46f9-88b2-abb515fdc1b0\") " Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.778112 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-scripts" (OuterVolumeSpecName: "scripts") pod "b3ee2364-5037-46f9-88b2-abb515fdc1b0" (UID: "b3ee2364-5037-46f9-88b2-abb515fdc1b0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.778159 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3ee2364-5037-46f9-88b2-abb515fdc1b0-kube-api-access-v4zhp" (OuterVolumeSpecName: "kube-api-access-v4zhp") pod "b3ee2364-5037-46f9-88b2-abb515fdc1b0" (UID: "b3ee2364-5037-46f9-88b2-abb515fdc1b0"). InnerVolumeSpecName "kube-api-access-v4zhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.802539 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3ee2364-5037-46f9-88b2-abb515fdc1b0" (UID: "b3ee2364-5037-46f9-88b2-abb515fdc1b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.803151 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-config-data" (OuterVolumeSpecName: "config-data") pod "b3ee2364-5037-46f9-88b2-abb515fdc1b0" (UID: "b3ee2364-5037-46f9-88b2-abb515fdc1b0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.870503 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.870547 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4zhp\" (UniqueName: \"kubernetes.io/projected/b3ee2364-5037-46f9-88b2-abb515fdc1b0-kube-api-access-v4zhp\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.870560 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:00 crc kubenswrapper[4792]: I0929 19:18:00.870572 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3ee2364-5037-46f9-88b2-abb515fdc1b0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.290478 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k2c4z" event={"ID":"b3ee2364-5037-46f9-88b2-abb515fdc1b0","Type":"ContainerDied","Data":"04568a5d1d77f8fef19f4eb505e8db8c38393a4e4969986f35539db0a61efa46"} Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.290540 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04568a5d1d77f8fef19f4eb505e8db8c38393a4e4969986f35539db0a61efa46" Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.290623 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k2c4z" Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.474071 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 19:18:01 crc kubenswrapper[4792]: E0929 19:18:01.474527 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ee2364-5037-46f9-88b2-abb515fdc1b0" containerName="nova-cell0-conductor-db-sync" Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.474552 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ee2364-5037-46f9-88b2-abb515fdc1b0" containerName="nova-cell0-conductor-db-sync" Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.474771 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3ee2364-5037-46f9-88b2-abb515fdc1b0" containerName="nova-cell0-conductor-db-sync" Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.475496 4792 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.477713 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.477758 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-xxzkx"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.492477 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.584100 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c04cb97-af56-4b40-b086-990e57b48c15-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8c04cb97-af56-4b40-b086-990e57b48c15\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.584187 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rcnk\" (UniqueName: \"kubernetes.io/projected/8c04cb97-af56-4b40-b086-990e57b48c15-kube-api-access-2rcnk\") pod \"nova-cell0-conductor-0\" (UID: \"8c04cb97-af56-4b40-b086-990e57b48c15\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.584230 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c04cb97-af56-4b40-b086-990e57b48c15-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8c04cb97-af56-4b40-b086-990e57b48c15\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.685689 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c04cb97-af56-4b40-b086-990e57b48c15-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8c04cb97-af56-4b40-b086-990e57b48c15\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.686074 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rcnk\" (UniqueName: \"kubernetes.io/projected/8c04cb97-af56-4b40-b086-990e57b48c15-kube-api-access-2rcnk\") pod \"nova-cell0-conductor-0\" (UID: \"8c04cb97-af56-4b40-b086-990e57b48c15\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.686116 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c04cb97-af56-4b40-b086-990e57b48c15-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8c04cb97-af56-4b40-b086-990e57b48c15\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.690041 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c04cb97-af56-4b40-b086-990e57b48c15-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8c04cb97-af56-4b40-b086-990e57b48c15\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.700571 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c04cb97-af56-4b40-b086-990e57b48c15-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8c04cb97-af56-4b40-b086-990e57b48c15\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.708512 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rcnk\" (UniqueName: \"kubernetes.io/projected/8c04cb97-af56-4b40-b086-990e57b48c15-kube-api-access-2rcnk\") pod \"nova-cell0-conductor-0\" (UID: \"8c04cb97-af56-4b40-b086-990e57b48c15\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:01 crc kubenswrapper[4792]: I0929 19:18:01.793909 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:02 crc kubenswrapper[4792]: I0929 19:18:02.265957 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Sep 29 19:18:02 crc kubenswrapper[4792]: I0929 19:18:02.301429 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"8c04cb97-af56-4b40-b086-990e57b48c15","Type":"ContainerStarted","Data":"f8b81310975e035145e917eaa4223128e1c055cae0184bcb49b1c01a6574c9b1"}
Sep 29 19:18:02 crc kubenswrapper[4792]: I0929 19:18:02.963227 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-dfd9c6b56-wq84c" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused"
Sep 29 19:18:03 crc kubenswrapper[4792]: I0929 19:18:03.313890 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"8c04cb97-af56-4b40-b086-990e57b48c15","Type":"ContainerStarted","Data":"d6b2382677e2cc6aec51b819f365703a3296dcee8cb9a4eb285d7634555eb66e"}
Sep 29 19:18:03 crc kubenswrapper[4792]: I0929 19:18:03.314291 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:03 crc kubenswrapper[4792]: I0929 19:18:03.339843 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.33982192 podStartE2EDuration="2.33982192s" podCreationTimestamp="2025-09-29 19:18:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:03.332949051 +0000 UTC m=+1295.326256457" watchObservedRunningTime="2025-09-29 19:18:03.33982192 +0000 UTC m=+1295.333129336"
Sep 29 19:18:11 crc kubenswrapper[4792]: I0929 19:18:11.825703 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.365005 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-zzmwh"]
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.366382 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.368635 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.368654 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.382417 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-zzmwh"]
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.484667 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5trf\" (UniqueName: \"kubernetes.io/projected/683f150a-35bc-423d-af3a-005d3a967d88-kube-api-access-d5trf\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.484970 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-config-data\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.485726 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-scripts\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.485783 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.553652 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.555222 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.562751 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.581574 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.587953 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5trf\" (UniqueName: \"kubernetes.io/projected/683f150a-35bc-423d-af3a-005d3a967d88-kube-api-access-d5trf\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.588007 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-config-data\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.588031 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-scripts\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.588059 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.594552 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.595566 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-scripts\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.612092 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-config-data\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.628931 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.630425 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.647755 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.660124 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5trf\" (UniqueName: \"kubernetes.io/projected/683f150a-35bc-423d-af3a-005d3a967d88-kube-api-access-d5trf\") pod \"nova-cell0-cell-mapping-zzmwh\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") " pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.672655 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.689081 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddjzp\" (UniqueName: \"kubernetes.io/projected/98f4b269-3703-47ff-826a-f9f4f9ac5a98-kube-api-access-ddjzp\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.689127 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98f4b269-3703-47ff-826a-f9f4f9ac5a98-logs\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.689150 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.689218 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-config-data\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.701081 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.787074 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.788264 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.800504 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.802190 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/601d6005-0364-43d4-89bc-ecdc60ef2bd9-logs\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.802320 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddjzp\" (UniqueName: \"kubernetes.io/projected/98f4b269-3703-47ff-826a-f9f4f9ac5a98-kube-api-access-ddjzp\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.802437 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk6dc\" (UniqueName: \"kubernetes.io/projected/601d6005-0364-43d4-89bc-ecdc60ef2bd9-kube-api-access-pk6dc\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.802554 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98f4b269-3703-47ff-826a-f9f4f9ac5a98-logs\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.802657 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.802750 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-config-data\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.803306 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98f4b269-3703-47ff-826a-f9f4f9ac5a98-logs\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.803335 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-config-data\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.803553 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.813034 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.814564 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-config-data\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.834445 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.904944 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-config-data\") pod \"nova-scheduler-0\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.905007 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.905038 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.905216 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r664z\" (UniqueName: \"kubernetes.io/projected/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-kube-api-access-r664z\") pod \"nova-scheduler-0\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.905270 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/601d6005-0364-43d4-89bc-ecdc60ef2bd9-logs\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.905297 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk6dc\" (UniqueName: \"kubernetes.io/projected/601d6005-0364-43d4-89bc-ecdc60ef2bd9-kube-api-access-pk6dc\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.905325 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-config-data\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.905513 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddjzp\" (UniqueName: \"kubernetes.io/projected/98f4b269-3703-47ff-826a-f9f4f9ac5a98-kube-api-access-ddjzp\") pod \"nova-metadata-0\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.906253 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/601d6005-0364-43d4-89bc-ecdc60ef2bd9-logs\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.910732 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-config-data\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.913475 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.937490 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-5gflj"]
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.939078 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.955037 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.956108 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.961487 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.963838 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-dfd9c6b56-wq84c" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.964038 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-dfd9c6b56-wq84c"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.969612 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk6dc\" (UniqueName: \"kubernetes.io/projected/601d6005-0364-43d4-89bc-ecdc60ef2bd9-kube-api-access-pk6dc\") pod \"nova-api-0\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " pod="openstack/nova-api-0"
Sep 29 19:18:12 crc kubenswrapper[4792]: I0929 19:18:12.979431 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-5gflj"]
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008187 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-config-data\") pod \"nova-scheduler-0\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008249 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008282 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008303 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6pt2\" (UniqueName: \"kubernetes.io/projected/d967b8db-0961-4897-a859-f1d46d30dba8-kube-api-access-c6pt2\") pod \"nova-cell1-novncproxy-0\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008328 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-svc\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008366 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008402 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r664z\" (UniqueName: \"kubernetes.io/projected/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-kube-api-access-r664z\") pod \"nova-scheduler-0\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008454 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-config\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008489 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008510 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008527 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.008549 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgm54\" (UniqueName: \"kubernetes.io/projected/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-kube-api-access-rgm54\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.010197 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.012810 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-config-data\") pod \"nova-scheduler-0\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.013034 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.099330 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r664z\" (UniqueName: \"kubernetes.io/projected/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-kube-api-access-r664z\") pod \"nova-scheduler-0\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.117937 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6pt2\" (UniqueName: \"kubernetes.io/projected/d967b8db-0961-4897-a859-f1d46d30dba8-kube-api-access-c6pt2\") pod \"nova-cell1-novncproxy-0\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.117995 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-svc\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.118033 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.118114 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-config\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.121384 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.121454 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.121475 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.121870 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgm54\" (UniqueName: \"kubernetes.io/projected/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-kube-api-access-rgm54\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.122005 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.126525 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-config\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.128997 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.132652 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.138102 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.138916 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.140257 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-svc\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.151778 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.157542 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6pt2\" (UniqueName: \"kubernetes.io/projected/d967b8db-0961-4897-a859-f1d46d30dba8-kube-api-access-c6pt2\") pod \"nova-cell1-novncproxy-0\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.157726 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgm54\" (UniqueName: \"kubernetes.io/projected/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-kube-api-access-rgm54\") pod \"dnsmasq-dns-bccf8f775-5gflj\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.163073 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.178584 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.210930 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.304342 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-5gflj"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.322101 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:13 crc kubenswrapper[4792]: I0929 19:18:13.554155 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-zzmwh"]
Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.003043 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.012308 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.013314 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.181533 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-5gflj"]
Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.248426 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pfgjk"]
Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.250151 4792 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.253256 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.253713 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.259573 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pfgjk"] Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.365499 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.371803 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-config-data\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.372068 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-scripts\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.372166 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm9fs\" (UniqueName: \"kubernetes.io/projected/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-kube-api-access-sm9fs\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.372250 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: W0929 19:18:14.372533 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd967b8db_0961_4897_a859_f1d46d30dba8.slice/crio-38a33ad11a6f9bc3f846aaae0c9c4ec1e7cb5595bb982db5349af63d0017b67c WatchSource:0}: Error finding container 38a33ad11a6f9bc3f846aaae0c9c4ec1e7cb5595bb982db5349af63d0017b67c: Status 404 returned error can't find the container with id 38a33ad11a6f9bc3f846aaae0c9c4ec1e7cb5595bb982db5349af63d0017b67c Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.393289 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:18:14 crc kubenswrapper[4792]: W0929 19:18:14.394038 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32dbf736_0a8a_4dfc_a3e3_ae36c3b2aa54.slice/crio-5ae2922fca1f8ffbbd0ad5e6328b2eaa58ce78524496bcd89144b93ad87ac513 WatchSource:0}: Error finding container 5ae2922fca1f8ffbbd0ad5e6328b2eaa58ce78524496bcd89144b93ad87ac513: Status 404 
returned error can't find the container with id 5ae2922fca1f8ffbbd0ad5e6328b2eaa58ce78524496bcd89144b93ad87ac513 Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.474412 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-config-data\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.474515 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-scripts\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.474563 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm9fs\" (UniqueName: \"kubernetes.io/projected/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-kube-api-access-sm9fs\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.474605 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.479680 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-config-data\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.479786 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.484655 4792 generic.go:334] "Generic (PLEG): container finished" podID="6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" containerID="9715e8ddda7b4e4778e7e2179fd1ce8a40c77edaccb014a951eedc0657ad8744" exitCode=0 Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.484837 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-scripts\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.484875 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-5gflj" event={"ID":"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789","Type":"ContainerDied","Data":"9715e8ddda7b4e4778e7e2179fd1ce8a40c77edaccb014a951eedc0657ad8744"} Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.484946 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-bccf8f775-5gflj" event={"ID":"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789","Type":"ContainerStarted","Data":"73ab2df25048104ac7df626c6b98743115e66f74c56d5931e7be512c914862a7"} Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.486907 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"601d6005-0364-43d4-89bc-ecdc60ef2bd9","Type":"ContainerStarted","Data":"79da213cd71b02b8425d137d735397d0903efbf51d9bf60e5930746557568975"} Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.490278 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zzmwh" event={"ID":"683f150a-35bc-423d-af3a-005d3a967d88","Type":"ContainerStarted","Data":"437cffbc7687f5e1387fea4c43fdd36e1183bf1e81416221b44a8a6819c55e17"} Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.490310 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zzmwh" event={"ID":"683f150a-35bc-423d-af3a-005d3a967d88","Type":"ContainerStarted","Data":"998f28590d59d3121bbf9d0f288497ccdab92fc739a3b9b624a4a651e36b3c12"} Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.491078 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98f4b269-3703-47ff-826a-f9f4f9ac5a98","Type":"ContainerStarted","Data":"a82dfbcff16297f48390c2808c6f8ebc6e24e11cc9802a6753d7a349adbb22ab"} Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.492541 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54","Type":"ContainerStarted","Data":"5ae2922fca1f8ffbbd0ad5e6328b2eaa58ce78524496bcd89144b93ad87ac513"} Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.494761 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d967b8db-0961-4897-a859-f1d46d30dba8","Type":"ContainerStarted","Data":"38a33ad11a6f9bc3f846aaae0c9c4ec1e7cb5595bb982db5349af63d0017b67c"} Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.504787 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm9fs\" (UniqueName: \"kubernetes.io/projected/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-kube-api-access-sm9fs\") pod \"nova-cell1-conductor-db-sync-pfgjk\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") " pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.552926 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-zzmwh" podStartSLOduration=2.552906091 podStartE2EDuration="2.552906091s" podCreationTimestamp="2025-09-29 19:18:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:14.541636118 +0000 UTC m=+1306.534943534" watchObservedRunningTime="2025-09-29 19:18:14.552906091 +0000 UTC m=+1306.546213487" Sep 29 19:18:14 crc kubenswrapper[4792]: I0929 19:18:14.595931 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pfgjk" Sep 29 19:18:15 crc kubenswrapper[4792]: I0929 19:18:15.099809 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pfgjk"] Sep 29 19:18:15 crc kubenswrapper[4792]: W0929 19:18:15.109799 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e141a44_a3f2_4d4c_af13_e6ca3d76ea01.slice/crio-7810ec2c6355304bded46e13996f74918eba0ab9a2b042f887d98a9aa33a86a2 WatchSource:0}: Error finding container 7810ec2c6355304bded46e13996f74918eba0ab9a2b042f887d98a9aa33a86a2: Status 404 returned error can't find the container with id 7810ec2c6355304bded46e13996f74918eba0ab9a2b042f887d98a9aa33a86a2 Sep 29 19:18:15 crc kubenswrapper[4792]: I0929 19:18:15.539104 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pfgjk" event={"ID":"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01","Type":"ContainerStarted","Data":"7810ec2c6355304bded46e13996f74918eba0ab9a2b042f887d98a9aa33a86a2"} Sep 29 19:18:15 crc kubenswrapper[4792]: I0929 19:18:15.581655 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-5gflj" event={"ID":"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789","Type":"ContainerStarted","Data":"595a3a55ac7fb226b96ff0d1ba205c24fa16c8065d0979c12837fbd99fabbee6"} Sep 29 19:18:15 crc kubenswrapper[4792]: I0929 19:18:15.581703 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bccf8f775-5gflj" Sep 29 19:18:15 crc kubenswrapper[4792]: I0929 19:18:15.621582 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bccf8f775-5gflj" podStartSLOduration=3.6215621049999998 podStartE2EDuration="3.621562105s" podCreationTimestamp="2025-09-29 19:18:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:15.610269242 +0000 UTC m=+1307.603576638" watchObservedRunningTime="2025-09-29 19:18:15.621562105 +0000 UTC m=+1307.614869501" Sep 29 19:18:16 crc kubenswrapper[4792]: I0929 19:18:16.585493 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:18:16 crc kubenswrapper[4792]: I0929 19:18:16.609495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pfgjk" event={"ID":"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01","Type":"ContainerStarted","Data":"a66e8d41695d0d35ba48561d3bf9e2a54e0b4d480b4091ce7a54ec48f6bb7ebd"} Sep 29 19:18:16 crc kubenswrapper[4792]: I0929 19:18:16.615042 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.550761 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.574955 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-pfgjk" podStartSLOduration=4.574936406 podStartE2EDuration="4.574936406s" podCreationTimestamp="2025-09-29 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:16.631069673 +0000 UTC m=+1308.624377069" watchObservedRunningTime="2025-09-29 19:18:18.574936406 +0000 UTC m=+1310.568243792" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.634339 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d967b8db-0961-4897-a859-f1d46d30dba8","Type":"ContainerStarted","Data":"1081e48c669d84e83b06df9d9d775cc07869a324aa1810d929d32bac72816f3f"} Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.634437 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="d967b8db-0961-4897-a859-f1d46d30dba8" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://1081e48c669d84e83b06df9d9d775cc07869a324aa1810d929d32bac72816f3f" gracePeriod=30 Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.640943 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"601d6005-0364-43d4-89bc-ecdc60ef2bd9","Type":"ContainerStarted","Data":"dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf"} Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.650774 4792 generic.go:334] "Generic (PLEG): container finished" podID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerID="44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046" exitCode=137 Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.650976 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-dfd9c6b56-wq84c" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.651086 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd9c6b56-wq84c" event={"ID":"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc","Type":"ContainerDied","Data":"44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046"} Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.651153 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd9c6b56-wq84c" event={"ID":"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc","Type":"ContainerDied","Data":"44e56ba45bc8ca2ce2011cd32946ce2f00f01164740cc7be4438b0ed89860256"} Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.651171 4792 scope.go:117] "RemoveContainer" containerID="33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.657500 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.157423717 podStartE2EDuration="6.65747668s" podCreationTimestamp="2025-09-29 19:18:12 +0000 UTC" firstStartedPulling="2025-09-29 19:18:14.375241395 +0000 UTC m=+1306.368548791" lastFinishedPulling="2025-09-29 19:18:17.875294358 +0000 UTC m=+1309.868601754" observedRunningTime="2025-09-29 19:18:18.650954321 +0000 UTC m=+1310.644261717" watchObservedRunningTime="2025-09-29 19:18:18.65747668 +0000 UTC m=+1310.650784076" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.659828 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98f4b269-3703-47ff-826a-f9f4f9ac5a98","Type":"ContainerStarted","Data":"b7dd0ce8ce59435410436b2e79bfbdcdeded61430c1bb76f80c2feaf8a3be826"} Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.659874 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98f4b269-3703-47ff-826a-f9f4f9ac5a98","Type":"ContainerStarted","Data":"8db4ea34f4e7b8929509c6d3a65341ac25ecfe70677a497947e4a3ab6e35f7fc"} Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.659977 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" containerName="nova-metadata-log" containerID="cri-o://8db4ea34f4e7b8929509c6d3a65341ac25ecfe70677a497947e4a3ab6e35f7fc" gracePeriod=30 Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.660433 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" containerName="nova-metadata-metadata" containerID="cri-o://b7dd0ce8ce59435410436b2e79bfbdcdeded61430c1bb76f80c2feaf8a3be826" gracePeriod=30 Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.671698 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54","Type":"ContainerStarted","Data":"073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda"} Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.689012 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-combined-ca-bundle\") pod \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.689082 4792 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-scripts\") pod \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.689123 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vctj4\" (UniqueName: \"kubernetes.io/projected/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-kube-api-access-vctj4\") pod \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.689198 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-tls-certs\") pod \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.689232 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-config-data\") pod \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.689248 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-secret-key\") pod \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.689334 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-logs\") pod \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\" (UID: \"eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc\") " Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.690176 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-logs" (OuterVolumeSpecName: "logs") pod "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" (UID: "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.692466 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.856667195 podStartE2EDuration="6.692454419s" podCreationTimestamp="2025-09-29 19:18:12 +0000 UTC" firstStartedPulling="2025-09-29 19:18:14.038739244 +0000 UTC m=+1306.032046630" lastFinishedPulling="2025-09-29 19:18:17.874526468 +0000 UTC m=+1309.867833854" observedRunningTime="2025-09-29 19:18:18.683128047 +0000 UTC m=+1310.676435443" watchObservedRunningTime="2025-09-29 19:18:18.692454419 +0000 UTC m=+1310.685761815" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.705040 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-kube-api-access-vctj4" (OuterVolumeSpecName: "kube-api-access-vctj4") pod "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" (UID: "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc"). InnerVolumeSpecName "kube-api-access-vctj4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.707497 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" (UID: "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.716663 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.231593084 podStartE2EDuration="6.716641237s" podCreationTimestamp="2025-09-29 19:18:12 +0000 UTC" firstStartedPulling="2025-09-29 19:18:14.396327893 +0000 UTC m=+1306.389635289" lastFinishedPulling="2025-09-29 19:18:17.881376036 +0000 UTC m=+1309.874683442" observedRunningTime="2025-09-29 19:18:18.701267648 +0000 UTC m=+1310.694575044" watchObservedRunningTime="2025-09-29 19:18:18.716641237 +0000 UTC m=+1310.709948633" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.739563 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-scripts" (OuterVolumeSpecName: "scripts") pod "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" (UID: "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.759560 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-config-data" (OuterVolumeSpecName: "config-data") pod "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" (UID: "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.761076 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" (UID: "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.788036 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" (UID: "eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.791391 4792 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.791424 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.791438 4792 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.791450 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.791462 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.791472 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.791484 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vctj4\" (UniqueName: \"kubernetes.io/projected/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc-kube-api-access-vctj4\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.862324 4792 scope.go:117] "RemoveContainer" containerID="44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.883675 4792 scope.go:117] "RemoveContainer" containerID="33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768" Sep 29 19:18:18 crc kubenswrapper[4792]: E0929 19:18:18.884636 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768\": container with ID starting with 33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768 not found: ID does not exist" containerID="33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.884677 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768"} err="failed to get container status \"33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768\": rpc error: code = NotFound desc = could not find container \"33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768\": container with ID starting with 33392c65aaa8fe7892add1ddea19e02248b8bee9d86ee8c7d1300e0357c0e768 not found: ID does not exist" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.884704 4792 scope.go:117] "RemoveContainer" containerID="44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046" Sep 29 19:18:18 crc kubenswrapper[4792]: 
E0929 19:18:18.885084 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046\": container with ID starting with 44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046 not found: ID does not exist" containerID="44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046" Sep 29 19:18:18 crc kubenswrapper[4792]: I0929 19:18:18.885129 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046"} err="failed to get container status \"44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046\": rpc error: code = NotFound desc = could not find container \"44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046\": container with ID starting with 44478d1bf7a5ee27026ad7e66c3d25fb1975493dd5e5e44e83e0e00c55fcb046 not found: ID does not exist" Sep 29 19:18:19 crc kubenswrapper[4792]: I0929 19:18:19.076615 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-dfd9c6b56-wq84c"] Sep 29 19:18:19 crc kubenswrapper[4792]: I0929 19:18:19.076652 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-dfd9c6b56-wq84c"] Sep 29 19:18:19 crc kubenswrapper[4792]: I0929 19:18:19.728697 4792 generic.go:334] "Generic (PLEG): container finished" podID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" containerID="8db4ea34f4e7b8929509c6d3a65341ac25ecfe70677a497947e4a3ab6e35f7fc" exitCode=143 Sep 29 19:18:19 crc kubenswrapper[4792]: I0929 19:18:19.729067 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98f4b269-3703-47ff-826a-f9f4f9ac5a98","Type":"ContainerDied","Data":"8db4ea34f4e7b8929509c6d3a65341ac25ecfe70677a497947e4a3ab6e35f7fc"} Sep 29 19:18:19 crc kubenswrapper[4792]: I0929 19:18:19.736204 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"601d6005-0364-43d4-89bc-ecdc60ef2bd9","Type":"ContainerStarted","Data":"736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68"} Sep 29 19:18:19 crc kubenswrapper[4792]: I0929 19:18:19.767526 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.906624682 podStartE2EDuration="7.767174031s" podCreationTimestamp="2025-09-29 19:18:12 +0000 UTC" firstStartedPulling="2025-09-29 19:18:14.013066876 +0000 UTC m=+1306.006374272" lastFinishedPulling="2025-09-29 19:18:17.873616225 +0000 UTC m=+1309.866923621" observedRunningTime="2025-09-29 19:18:19.760362574 +0000 UTC m=+1311.753669990" watchObservedRunningTime="2025-09-29 19:18:19.767174031 +0000 UTC m=+1311.760481427" Sep 29 19:18:20 crc kubenswrapper[4792]: I0929 19:18:20.887417 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 19:18:21 crc kubenswrapper[4792]: I0929 19:18:21.027464 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" path="/var/lib/kubelet/pods/eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc/volumes" Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.164236 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.164770 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 
19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.178765 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.178880 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.211463 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.211524 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.241921 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.307121 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bccf8f775-5gflj" Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.323563 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.413821 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-jn4q7"] Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.414068 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" podUID="04e38954-ec57-4d8c-811b-e05b3ca8ba9d" containerName="dnsmasq-dns" containerID="cri-o://e6d70dacef0aaa7cbff2e1e279bdbb0094e89f8a294673540279a4d0279e6d89" gracePeriod=10 Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.450608 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" podUID="04e38954-ec57-4d8c-811b-e05b3ca8ba9d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.166:5353: connect: connection refused" Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.795397 4792 generic.go:334] "Generic (PLEG): container finished" podID="04e38954-ec57-4d8c-811b-e05b3ca8ba9d" containerID="e6d70dacef0aaa7cbff2e1e279bdbb0094e89f8a294673540279a4d0279e6d89" exitCode=0 Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.795477 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" event={"ID":"04e38954-ec57-4d8c-811b-e05b3ca8ba9d","Type":"ContainerDied","Data":"e6d70dacef0aaa7cbff2e1e279bdbb0094e89f8a294673540279a4d0279e6d89"} Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.799416 4792 generic.go:334] "Generic (PLEG): container finished" podID="3e141a44-a3f2-4d4c-af13-e6ca3d76ea01" containerID="a66e8d41695d0d35ba48561d3bf9e2a54e0b4d480b4091ce7a54ec48f6bb7ebd" exitCode=0 Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.799498 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pfgjk" event={"ID":"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01","Type":"ContainerDied","Data":"a66e8d41695d0d35ba48561d3bf9e2a54e0b4d480b4091ce7a54ec48f6bb7ebd"} Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.804972 4792 generic.go:334] "Generic (PLEG): container finished" podID="683f150a-35bc-423d-af3a-005d3a967d88" containerID="437cffbc7687f5e1387fea4c43fdd36e1183bf1e81416221b44a8a6819c55e17" exitCode=0 Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.805923 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-cell-mapping-zzmwh" event={"ID":"683f150a-35bc-423d-af3a-005d3a967d88","Type":"ContainerDied","Data":"437cffbc7687f5e1387fea4c43fdd36e1183bf1e81416221b44a8a6819c55e17"} Sep 29 19:18:23 crc kubenswrapper[4792]: I0929 19:18:23.891925 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.066511 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.224675 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-nb\") pod \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.225404 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-svc\") pod \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.225428 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-sb\") pod \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.225525 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-swift-storage-0\") pod \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.225552 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-config\") pod \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.225578 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqbzq\" (UniqueName: \"kubernetes.io/projected/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-kube-api-access-gqbzq\") pod \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\" (UID: \"04e38954-ec57-4d8c-811b-e05b3ca8ba9d\") " Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.248606 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.249220 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.264631 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-kube-api-access-gqbzq" (OuterVolumeSpecName: "kube-api-access-gqbzq") pod "04e38954-ec57-4d8c-811b-e05b3ca8ba9d" (UID: "04e38954-ec57-4d8c-811b-e05b3ca8ba9d"). InnerVolumeSpecName "kube-api-access-gqbzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.327058 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqbzq\" (UniqueName: \"kubernetes.io/projected/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-kube-api-access-gqbzq\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.334606 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "04e38954-ec57-4d8c-811b-e05b3ca8ba9d" (UID: "04e38954-ec57-4d8c-811b-e05b3ca8ba9d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.353479 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "04e38954-ec57-4d8c-811b-e05b3ca8ba9d" (UID: "04e38954-ec57-4d8c-811b-e05b3ca8ba9d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.358979 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "04e38954-ec57-4d8c-811b-e05b3ca8ba9d" (UID: "04e38954-ec57-4d8c-811b-e05b3ca8ba9d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.366827 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-config" (OuterVolumeSpecName: "config") pod "04e38954-ec57-4d8c-811b-e05b3ca8ba9d" (UID: "04e38954-ec57-4d8c-811b-e05b3ca8ba9d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.369935 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "04e38954-ec57-4d8c-811b-e05b3ca8ba9d" (UID: "04e38954-ec57-4d8c-811b-e05b3ca8ba9d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.428890 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.428926 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.428939 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.428950 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.428964 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04e38954-ec57-4d8c-811b-e05b3ca8ba9d-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.815594 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" event={"ID":"04e38954-ec57-4d8c-811b-e05b3ca8ba9d","Type":"ContainerDied","Data":"3bc0a35efd9b16ac19255dae09f8e4d2e0736ded92f6ecb1dcedbdb987c6dcf3"} Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.815703 4792 scope.go:117] "RemoveContainer" containerID="e6d70dacef0aaa7cbff2e1e279bdbb0094e89f8a294673540279a4d0279e6d89" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.816006 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-jn4q7" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.846631 4792 scope.go:117] "RemoveContainer" containerID="914d73e3665ff5c2b32960268da1543b1d450a8d6ebb7de8ce0ca8b48bf4c1d6" Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.887604 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-jn4q7"] Sep 29 19:18:24 crc kubenswrapper[4792]: I0929 19:18:24.915179 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-jn4q7"] Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.025241 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04e38954-ec57-4d8c-811b-e05b3ca8ba9d" path="/var/lib/kubelet/pods/04e38954-ec57-4d8c-811b-e05b3ca8ba9d/volumes" Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.226535 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.226727 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="a90baac4-d8fc-472d-992f-c1a36805b12e" containerName="kube-state-metrics" containerID="cri-o://b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c" gracePeriod=30 Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.425540 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.436946 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pfgjk"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.556233 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-combined-ca-bundle\") pod \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") "
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.562458 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-scripts\") pod \"683f150a-35bc-423d-af3a-005d3a967d88\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") "
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.563983 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-scripts\") pod \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") "
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.565349 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm9fs\" (UniqueName: \"kubernetes.io/projected/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-kube-api-access-sm9fs\") pod \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") "
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.592475 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-combined-ca-bundle\") pod \"683f150a-35bc-423d-af3a-005d3a967d88\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") "
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.592573 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-config-data\") pod \"683f150a-35bc-423d-af3a-005d3a967d88\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") "
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.592598 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-config-data\") pod \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\" (UID: \"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01\") "
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.592644 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5trf\" (UniqueName: \"kubernetes.io/projected/683f150a-35bc-423d-af3a-005d3a967d88-kube-api-access-d5trf\") pod \"683f150a-35bc-423d-af3a-005d3a967d88\" (UID: \"683f150a-35bc-423d-af3a-005d3a967d88\") "
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.594464 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-scripts" (OuterVolumeSpecName: "scripts") pod "3e141a44-a3f2-4d4c-af13-e6ca3d76ea01" (UID: "3e141a44-a3f2-4d4c-af13-e6ca3d76ea01"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.594620 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-kube-api-access-sm9fs" (OuterVolumeSpecName: "kube-api-access-sm9fs") pod "3e141a44-a3f2-4d4c-af13-e6ca3d76ea01" (UID: "3e141a44-a3f2-4d4c-af13-e6ca3d76ea01"). InnerVolumeSpecName "kube-api-access-sm9fs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.598274 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-scripts" (OuterVolumeSpecName: "scripts") pod "683f150a-35bc-423d-af3a-005d3a967d88" (UID: "683f150a-35bc-423d-af3a-005d3a967d88"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.602537 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.603063 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.604816 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm9fs\" (UniqueName: \"kubernetes.io/projected/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-kube-api-access-sm9fs\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.635561 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/683f150a-35bc-423d-af3a-005d3a967d88-kube-api-access-d5trf" (OuterVolumeSpecName: "kube-api-access-d5trf") pod "683f150a-35bc-423d-af3a-005d3a967d88" (UID: "683f150a-35bc-423d-af3a-005d3a967d88"). InnerVolumeSpecName "kube-api-access-d5trf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.657014 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "683f150a-35bc-423d-af3a-005d3a967d88" (UID: "683f150a-35bc-423d-af3a-005d3a967d88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.668258 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e141a44-a3f2-4d4c-af13-e6ca3d76ea01" (UID: "3e141a44-a3f2-4d4c-af13-e6ca3d76ea01"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.671547 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-config-data" (OuterVolumeSpecName: "config-data") pod "3e141a44-a3f2-4d4c-af13-e6ca3d76ea01" (UID: "3e141a44-a3f2-4d4c-af13-e6ca3d76ea01"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.700007 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-config-data" (OuterVolumeSpecName: "config-data") pod "683f150a-35bc-423d-af3a-005d3a967d88" (UID: "683f150a-35bc-423d-af3a-005d3a967d88"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.706729 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.706774 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/683f150a-35bc-423d-af3a-005d3a967d88-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.706788 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.706801 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5trf\" (UniqueName: \"kubernetes.io/projected/683f150a-35bc-423d-af3a-005d3a967d88-kube-api-access-d5trf\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.706814 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.769751 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.838154 4792 generic.go:334] "Generic (PLEG): container finished" podID="a90baac4-d8fc-472d-992f-c1a36805b12e" containerID="b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c" exitCode=2
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.838210 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.838210 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a90baac4-d8fc-472d-992f-c1a36805b12e","Type":"ContainerDied","Data":"b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c"}
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.838286 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a90baac4-d8fc-472d-992f-c1a36805b12e","Type":"ContainerDied","Data":"3b7e18102a99e2f53e8b9df540a041b536c59902c9446bcaf71769c7f3c9ce9c"}
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.838308 4792 scope.go:117] "RemoveContainer" containerID="b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.841243 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zzmwh" event={"ID":"683f150a-35bc-423d-af3a-005d3a967d88","Type":"ContainerDied","Data":"998f28590d59d3121bbf9d0f288497ccdab92fc739a3b9b624a4a651e36b3c12"}
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.841271 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="998f28590d59d3121bbf9d0f288497ccdab92fc739a3b9b624a4a651e36b3c12"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.841322 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zzmwh"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.861867 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pfgjk" event={"ID":"3e141a44-a3f2-4d4c-af13-e6ca3d76ea01","Type":"ContainerDied","Data":"7810ec2c6355304bded46e13996f74918eba0ab9a2b042f887d98a9aa33a86a2"}
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.861916 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7810ec2c6355304bded46e13996f74918eba0ab9a2b042f887d98a9aa33a86a2"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.862001 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pfgjk"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.899244 4792 scope.go:117] "RemoveContainer" containerID="b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c"
Sep 29 19:18:25 crc kubenswrapper[4792]: E0929 19:18:25.902946 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c\": container with ID starting with b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c not found: ID does not exist" containerID="b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.902985 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c"} err="failed to get container status \"b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c\": rpc error: code = NotFound desc = could not find container \"b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c\": container with ID starting with b9a386bdcefa8295dc0acdad8dc91228d4d1c67b29a31f173488ab553ce7bf7c not found: ID does not exist"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.911977 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wg66\" (UniqueName: \"kubernetes.io/projected/a90baac4-d8fc-472d-992f-c1a36805b12e-kube-api-access-7wg66\") pod \"a90baac4-d8fc-472d-992f-c1a36805b12e\" (UID: \"a90baac4-d8fc-472d-992f-c1a36805b12e\") "
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.923473 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a90baac4-d8fc-472d-992f-c1a36805b12e-kube-api-access-7wg66" (OuterVolumeSpecName: "kube-api-access-7wg66") pod "a90baac4-d8fc-472d-992f-c1a36805b12e" (UID: "a90baac4-d8fc-472d-992f-c1a36805b12e"). InnerVolumeSpecName "kube-api-access-7wg66". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.929704 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Sep 29 19:18:25 crc kubenswrapper[4792]: E0929 19:18:25.930146 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04e38954-ec57-4d8c-811b-e05b3ca8ba9d" containerName="dnsmasq-dns"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930163 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="04e38954-ec57-4d8c-811b-e05b3ca8ba9d" containerName="dnsmasq-dns"
Sep 29 19:18:25 crc kubenswrapper[4792]: E0929 19:18:25.930181 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="683f150a-35bc-423d-af3a-005d3a967d88" containerName="nova-manage"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930186 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="683f150a-35bc-423d-af3a-005d3a967d88" containerName="nova-manage"
Sep 29 19:18:25 crc kubenswrapper[4792]: E0929 19:18:25.930202 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a90baac4-d8fc-472d-992f-c1a36805b12e" containerName="kube-state-metrics"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930210 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a90baac4-d8fc-472d-992f-c1a36805b12e" containerName="kube-state-metrics"
Sep 29 19:18:25 crc kubenswrapper[4792]: E0929 19:18:25.930222 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930227 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon"
Sep 29 19:18:25 crc kubenswrapper[4792]: E0929 19:18:25.930235 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04e38954-ec57-4d8c-811b-e05b3ca8ba9d" containerName="init"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930242 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="04e38954-ec57-4d8c-811b-e05b3ca8ba9d" containerName="init"
Sep 29 19:18:25 crc kubenswrapper[4792]: E0929 19:18:25.930258 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e141a44-a3f2-4d4c-af13-e6ca3d76ea01" containerName="nova-cell1-conductor-db-sync"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930264 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e141a44-a3f2-4d4c-af13-e6ca3d76ea01" containerName="nova-cell1-conductor-db-sync"
Sep 29 19:18:25 crc kubenswrapper[4792]: E0929 19:18:25.930277 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon-log"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930282 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon-log"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930509 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="683f150a-35bc-423d-af3a-005d3a967d88" containerName="nova-manage"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930531 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="04e38954-ec57-4d8c-811b-e05b3ca8ba9d" containerName="dnsmasq-dns"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930539 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930547 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e141a44-a3f2-4d4c-af13-e6ca3d76ea01" containerName="nova-cell1-conductor-db-sync"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930557 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a90baac4-d8fc-472d-992f-c1a36805b12e" containerName="kube-state-metrics"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930565 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon-log"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.930580 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.931242 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.934911 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Sep 29 19:18:25 crc kubenswrapper[4792]: I0929 19:18:25.940982 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.013864 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/454e3f84-9408-4433-9e44-15dde1100854-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"454e3f84-9408-4433-9e44-15dde1100854\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.014160 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwfnx\" (UniqueName: \"kubernetes.io/projected/454e3f84-9408-4433-9e44-15dde1100854-kube-api-access-nwfnx\") pod \"nova-cell1-conductor-0\" (UID: \"454e3f84-9408-4433-9e44-15dde1100854\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.014362 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/454e3f84-9408-4433-9e44-15dde1100854-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"454e3f84-9408-4433-9e44-15dde1100854\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.014558 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wg66\" (UniqueName: \"kubernetes.io/projected/a90baac4-d8fc-472d-992f-c1a36805b12e-kube-api-access-7wg66\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.098265 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.098555 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerName="nova-api-log" containerID="cri-o://dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf" gracePeriod=30
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.099018 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerName="nova-api-api" containerID="cri-o://736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68" gracePeriod=30
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.111361 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.120158 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54" containerName="nova-scheduler-scheduler" containerID="cri-o://073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda" gracePeriod=30
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.116179 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwfnx\" (UniqueName: \"kubernetes.io/projected/454e3f84-9408-4433-9e44-15dde1100854-kube-api-access-nwfnx\") pod \"nova-cell1-conductor-0\" (UID: \"454e3f84-9408-4433-9e44-15dde1100854\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.120998 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/454e3f84-9408-4433-9e44-15dde1100854-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"454e3f84-9408-4433-9e44-15dde1100854\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.121379 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/454e3f84-9408-4433-9e44-15dde1100854-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"454e3f84-9408-4433-9e44-15dde1100854\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.130660 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/454e3f84-9408-4433-9e44-15dde1100854-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"454e3f84-9408-4433-9e44-15dde1100854\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.131460 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/454e3f84-9408-4433-9e44-15dde1100854-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"454e3f84-9408-4433-9e44-15dde1100854\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.154431 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwfnx\" (UniqueName: \"kubernetes.io/projected/454e3f84-9408-4433-9e44-15dde1100854-kube-api-access-nwfnx\") pod \"nova-cell1-conductor-0\" (UID: \"454e3f84-9408-4433-9e44-15dde1100854\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.167486 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.182126 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.196958 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:18:26 crc kubenswrapper[4792]: E0929 19:18:26.197431 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.197448 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb79e1ef-cf0c-407b-9b37-c7ad8d65a3cc" containerName="horizon"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.198337 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.201445 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.201656 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.210931 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.283051 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.325331 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e148125d-7567-47d9-a3c3-32bd51ee3c9c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.325380 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhhbs\" (UniqueName: \"kubernetes.io/projected/e148125d-7567-47d9-a3c3-32bd51ee3c9c-kube-api-access-bhhbs\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.325434 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e148125d-7567-47d9-a3c3-32bd51ee3c9c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.325537 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e148125d-7567-47d9-a3c3-32bd51ee3c9c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.427493 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e148125d-7567-47d9-a3c3-32bd51ee3c9c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.427835 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e148125d-7567-47d9-a3c3-32bd51ee3c9c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.427880 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhhbs\" (UniqueName: \"kubernetes.io/projected/e148125d-7567-47d9-a3c3-32bd51ee3c9c-kube-api-access-bhhbs\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.427940 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e148125d-7567-47d9-a3c3-32bd51ee3c9c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.446740 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e148125d-7567-47d9-a3c3-32bd51ee3c9c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.450702 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e148125d-7567-47d9-a3c3-32bd51ee3c9c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.453979 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e148125d-7567-47d9-a3c3-32bd51ee3c9c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.460111 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhhbs\" (UniqueName: \"kubernetes.io/projected/e148125d-7567-47d9-a3c3-32bd51ee3c9c-kube-api-access-bhhbs\") pod \"kube-state-metrics-0\" (UID: \"e148125d-7567-47d9-a3c3-32bd51ee3c9c\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.528013 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.789627 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.893914 4792 generic.go:334] "Generic (PLEG): container finished" podID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerID="dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf" exitCode=143
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.893967 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"601d6005-0364-43d4-89bc-ecdc60ef2bd9","Type":"ContainerDied","Data":"dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf"}
Sep 29 19:18:26 crc kubenswrapper[4792]: I0929 19:18:26.895946 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"454e3f84-9408-4433-9e44-15dde1100854","Type":"ContainerStarted","Data":"6d60745c1057dcdec82368d5a46eaad375b5f3ecdd0aae096cf12c75fafbb907"}
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.027652 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a90baac4-d8fc-472d-992f-c1a36805b12e" path="/var/lib/kubelet/pods/a90baac4-d8fc-472d-992f-c1a36805b12e/volumes"
Sep 29 19:18:27 crc kubenswrapper[4792]: W0929 19:18:27.065438 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode148125d_7567_47d9_a3c3_32bd51ee3c9c.slice/crio-1db7af1a18e0b0c7d969bc4b8410451599dcecb11e79adf78998bb07cff67b41 WatchSource:0}: Error finding container 1db7af1a18e0b0c7d969bc4b8410451599dcecb11e79adf78998bb07cff67b41: Status 404 returned error can't find the container with id 1db7af1a18e0b0c7d969bc4b8410451599dcecb11e79adf78998bb07cff67b41
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.074051 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.798050 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.798790 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="ceilometer-central-agent" containerID="cri-o://fead914b9dae4990de78949f0c6927f2fbc892b74b5bcc0c10290c7c6ab3fc67" gracePeriod=30
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.798882 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="sg-core" containerID="cri-o://4f9961a72884934fb318d07a9cadf41e25b23e1fa38d13d1a16a1f2ed652c083" gracePeriod=30
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.798916 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="ceilometer-notification-agent" containerID="cri-o://78be82762986ee2f2337a65f517e83f970a822199623e58218aeede506872bc6" gracePeriod=30
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.799057 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="proxy-httpd" containerID="cri-o://37803a381b2e7c8b62b128f03c144e6b74af030cec806470f0595aa5eea9807a" gracePeriod=30
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.907262 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e148125d-7567-47d9-a3c3-32bd51ee3c9c","Type":"ContainerStarted","Data":"062427e412c0d5c8d8986a9232eb1e0eecb5ea545fe97155c1999713bf30bb4a"}
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.907651 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.907675 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e148125d-7567-47d9-a3c3-32bd51ee3c9c","Type":"ContainerStarted","Data":"1db7af1a18e0b0c7d969bc4b8410451599dcecb11e79adf78998bb07cff67b41"}
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.908803 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"454e3f84-9408-4433-9e44-15dde1100854","Type":"ContainerStarted","Data":"2ad3cf22d3517d771ef5446222b6c5d25715ce13af90646a7d05404ea62859ef"}
Sep 29 19:18:27 crc kubenswrapper[4792]: I0929 19:18:27.909152 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:28 crc kubenswrapper[4792]: I0929 19:18:28.018966 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.656155561 podStartE2EDuration="2.018943427s" podCreationTimestamp="2025-09-29 19:18:26 +0000 UTC" firstStartedPulling="2025-09-29 19:18:27.072960769 +0000 UTC m=+1319.066268175" lastFinishedPulling="2025-09-29 19:18:27.435748645 +0000 UTC m=+1319.429056041" observedRunningTime="2025-09-29 19:18:27.959290877 +0000 UTC m=+1319.952598283" watchObservedRunningTime="2025-09-29 19:18:28.018943427 +0000 UTC m=+1320.012250823"
Sep 29 19:18:28 crc kubenswrapper[4792]: I0929 19:18:28.019627 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.019622555 podStartE2EDuration="3.019622555s" podCreationTimestamp="2025-09-29 19:18:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:28.003049454 +0000 UTC m=+1319.996356870" watchObservedRunningTime="2025-09-29 19:18:28.019622555 +0000 UTC m=+1320.012929951"
Sep 29 19:18:28 crc kubenswrapper[4792]: E0929 19:18:28.213576 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 29 19:18:28 crc kubenswrapper[4792]: E0929 19:18:28.214880 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 29 19:18:28 crc kubenswrapper[4792]: E0929 19:18:28.215960 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 29 19:18:28 crc kubenswrapper[4792]: E0929 19:18:28.216103 4792 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54" containerName="nova-scheduler-scheduler"
Sep 29 19:18:28 crc kubenswrapper[4792]: I0929 19:18:28.932662 4792 generic.go:334] "Generic (PLEG): container finished" podID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerID="37803a381b2e7c8b62b128f03c144e6b74af030cec806470f0595aa5eea9807a" exitCode=0
Sep 29 19:18:28 crc kubenswrapper[4792]: I0929 19:18:28.933063 4792 generic.go:334] "Generic (PLEG): container finished" podID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerID="4f9961a72884934fb318d07a9cadf41e25b23e1fa38d13d1a16a1f2ed652c083" exitCode=2
Sep 29 19:18:28 crc kubenswrapper[4792]: I0929 19:18:28.933071 4792 generic.go:334] "Generic (PLEG): container finished" podID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerID="78be82762986ee2f2337a65f517e83f970a822199623e58218aeede506872bc6" exitCode=0
Sep 29 19:18:28 crc kubenswrapper[4792]: I0929 19:18:28.933078 4792 generic.go:334] "Generic (PLEG): container finished" podID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerID="fead914b9dae4990de78949f0c6927f2fbc892b74b5bcc0c10290c7c6ab3fc67" exitCode=0
Sep 29 19:18:28 crc kubenswrapper[4792]: I0929 19:18:28.933813 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1223f15-a51f-4a79-900d-aedf61ea6515","Type":"ContainerDied","Data":"37803a381b2e7c8b62b128f03c144e6b74af030cec806470f0595aa5eea9807a"}
Sep 29 19:18:28 crc kubenswrapper[4792]: I0929 19:18:28.933836 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1223f15-a51f-4a79-900d-aedf61ea6515","Type":"ContainerDied","Data":"4f9961a72884934fb318d07a9cadf41e25b23e1fa38d13d1a16a1f2ed652c083"}
Sep 29 19:18:28 crc kubenswrapper[4792]: I0929 19:18:28.933876 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1223f15-a51f-4a79-900d-aedf61ea6515","Type":"ContainerDied","Data":"78be82762986ee2f2337a65f517e83f970a822199623e58218aeede506872bc6"}
Sep 29 19:18:28 crc kubenswrapper[4792]: I0929 19:18:28.933888 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1223f15-a51f-4a79-900d-aedf61ea6515","Type":"ContainerDied","Data":"fead914b9dae4990de78949f0c6927f2fbc892b74b5bcc0c10290c7c6ab3fc67"}
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.121086 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.157570 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qt25\" (UniqueName: \"kubernetes.io/projected/e1223f15-a51f-4a79-900d-aedf61ea6515-kube-api-access-5qt25\") pod \"e1223f15-a51f-4a79-900d-aedf61ea6515\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") "
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.157686 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-scripts\") pod \"e1223f15-a51f-4a79-900d-aedf61ea6515\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") "
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.157779 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-config-data\") pod \"e1223f15-a51f-4a79-900d-aedf61ea6515\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") "
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.157814 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-sg-core-conf-yaml\") pod \"e1223f15-a51f-4a79-900d-aedf61ea6515\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") "
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.157879 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-combined-ca-bundle\") pod \"e1223f15-a51f-4a79-900d-aedf61ea6515\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") "
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.158003 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-run-httpd\") pod \"e1223f15-a51f-4a79-900d-aedf61ea6515\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") "
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.158064 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-log-httpd\") pod \"e1223f15-a51f-4a79-900d-aedf61ea6515\" (UID: \"e1223f15-a51f-4a79-900d-aedf61ea6515\") "
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.158435 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e1223f15-a51f-4a79-900d-aedf61ea6515" (UID: "e1223f15-a51f-4a79-900d-aedf61ea6515"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.158675 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e1223f15-a51f-4a79-900d-aedf61ea6515" (UID: "e1223f15-a51f-4a79-900d-aedf61ea6515"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.158984 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.159008 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1223f15-a51f-4a79-900d-aedf61ea6515-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.186725 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-scripts" (OuterVolumeSpecName: "scripts") pod "e1223f15-a51f-4a79-900d-aedf61ea6515" (UID: "e1223f15-a51f-4a79-900d-aedf61ea6515"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.187191 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1223f15-a51f-4a79-900d-aedf61ea6515-kube-api-access-5qt25" (OuterVolumeSpecName: "kube-api-access-5qt25") pod "e1223f15-a51f-4a79-900d-aedf61ea6515" (UID: "e1223f15-a51f-4a79-900d-aedf61ea6515"). InnerVolumeSpecName "kube-api-access-5qt25". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.260231 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qt25\" (UniqueName: \"kubernetes.io/projected/e1223f15-a51f-4a79-900d-aedf61ea6515-kube-api-access-5qt25\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.260731 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.305118 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1223f15-a51f-4a79-900d-aedf61ea6515" (UID: "e1223f15-a51f-4a79-900d-aedf61ea6515"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.305573 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e1223f15-a51f-4a79-900d-aedf61ea6515" (UID: "e1223f15-a51f-4a79-900d-aedf61ea6515"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.333903 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-config-data" (OuterVolumeSpecName: "config-data") pod "e1223f15-a51f-4a79-900d-aedf61ea6515" (UID: "e1223f15-a51f-4a79-900d-aedf61ea6515"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.362132 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.362177 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.362191 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1223f15-a51f-4a79-900d-aedf61ea6515-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.943043 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1223f15-a51f-4a79-900d-aedf61ea6515","Type":"ContainerDied","Data":"21bae07bc1703bef01a03dfc6b78b89d222286b14cebd733d9c684e812c16d6f"}
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.943128 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.943352 4792 scope.go:117] "RemoveContainer" containerID="37803a381b2e7c8b62b128f03c144e6b74af030cec806470f0595aa5eea9807a"
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.990578 4792 scope.go:117] "RemoveContainer" containerID="4f9961a72884934fb318d07a9cadf41e25b23e1fa38d13d1a16a1f2ed652c083"
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.991365 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:18:29 crc kubenswrapper[4792]: I0929 19:18:29.999264 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.017485 4792 scope.go:117] "RemoveContainer" containerID="78be82762986ee2f2337a65f517e83f970a822199623e58218aeede506872bc6"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.030377 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:18:30 crc kubenswrapper[4792]: E0929 19:18:30.030824 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="ceilometer-central-agent"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.030866 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="ceilometer-central-agent"
Sep 29 19:18:30 crc kubenswrapper[4792]: E0929 19:18:30.030892 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="ceilometer-notification-agent"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.030901 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="ceilometer-notification-agent"
Sep 29 19:18:30 crc kubenswrapper[4792]: E0929 19:18:30.030919 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="sg-core"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.030927 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="sg-core"
Sep 29 19:18:30 crc kubenswrapper[4792]: E0929 19:18:30.030950 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="proxy-httpd"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.030957 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="proxy-httpd"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.031420 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="ceilometer-notification-agent"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.031455 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="proxy-httpd"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.031472 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="sg-core"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.031487 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" containerName="ceilometer-central-agent"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.033205 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.037091 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.037261 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.037377 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.037778 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.077316 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.077381 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-run-httpd\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.077422 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.077450 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-config-data\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.077492 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-log-httpd\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.077539 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmxmh\" (UniqueName: \"kubernetes.io/projected/91353c86-67a7-4198-9391-52f85954178b-kube-api-access-dmxmh\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.077573 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-scripts\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.077668 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.109769 4792 scope.go:117] "RemoveContainer" containerID="fead914b9dae4990de78949f0c6927f2fbc892b74b5bcc0c10290c7c6ab3fc67"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.179232 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.179278 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-config-data\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.179313 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-log-httpd\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.179331 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmxmh\" (UniqueName: \"kubernetes.io/projected/91353c86-67a7-4198-9391-52f85954178b-kube-api-access-dmxmh\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.179358 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-scripts\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.179414 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.179470 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.179496 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-run-httpd\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.180704 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-log-httpd\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.180834 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-run-httpd\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.189539 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.189566 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-scripts\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.189906 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.190098 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.190689 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-config-data\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.196595 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmxmh\" (UniqueName: \"kubernetes.io/projected/91353c86-67a7-4198-9391-52f85954178b-kube-api-access-dmxmh\") pod \"ceilometer-0\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.405474 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.407046 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.487687 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-combined-ca-bundle\") pod \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") "
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.487801 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-config-data\") pod \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") "
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.487888 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r664z\" (UniqueName: \"kubernetes.io/projected/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-kube-api-access-r664z\") pod \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\" (UID: \"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54\") "
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.493264 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-kube-api-access-r664z" (OuterVolumeSpecName: "kube-api-access-r664z") pod "32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54" (UID: "32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54"). InnerVolumeSpecName "kube-api-access-r664z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.520598 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54" (UID: "32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.529305 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-config-data" (OuterVolumeSpecName: "config-data") pod "32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54" (UID: "32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.590373 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r664z\" (UniqueName: \"kubernetes.io/projected/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-kube-api-access-r664z\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.590403 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.590413 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.900269 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.959823 4792 generic.go:334] "Generic (PLEG): container finished" podID="32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54" containerID="073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda" exitCode=0
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.959910 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54","Type":"ContainerDied","Data":"073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda"}
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.959936 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54","Type":"ContainerDied","Data":"5ae2922fca1f8ffbbd0ad5e6328b2eaa58ce78524496bcd89144b93ad87ac513"}
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.959961 4792 scope.go:117] "RemoveContainer" containerID="073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.960383 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 19:18:30 crc kubenswrapper[4792]: I0929 19:18:30.964057 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91353c86-67a7-4198-9391-52f85954178b","Type":"ContainerStarted","Data":"6da737f91740811b06392df0cb9a37afc3925a9fad4058839a43b2a1bc0d608c"}
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.006436 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.010036 4792 scope.go:117] "RemoveContainer" containerID="073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda"
Sep 29 19:18:31 crc kubenswrapper[4792]: E0929 19:18:31.010608 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda\": container with ID starting with 073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda not found: ID does not exist" containerID="073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.010739 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda"} err="failed to get container status \"073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda\": rpc error: code = NotFound desc = could not find container \"073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda\": container with ID starting with 073bc9d902930b2c2211466a500c9a47dba48dabe651cac9b09352139a4fbcda not found: ID does not exist"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.029174 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1223f15-a51f-4a79-900d-aedf61ea6515" path="/var/lib/kubelet/pods/e1223f15-a51f-4a79-900d-aedf61ea6515/volumes"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.030029 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.030058 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:18:31 crc kubenswrapper[4792]: E0929 19:18:31.030696 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54" containerName="nova-scheduler-scheduler"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.030718 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54" containerName="nova-scheduler-scheduler"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.031546 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54" containerName="nova-scheduler-scheduler"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.032311 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.034074 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.034738 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.099801 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-config-data\") pod \"nova-scheduler-0\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.100047 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.100130 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdf5z\" (UniqueName: \"kubernetes.io/projected/c0cab82e-384f-4a9d-bf43-a72af2f53e79-kube-api-access-gdf5z\") pod \"nova-scheduler-0\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.202410 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.202928 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdf5z\" (UniqueName: \"kubernetes.io/projected/c0cab82e-384f-4a9d-bf43-a72af2f53e79-kube-api-access-gdf5z\") pod \"nova-scheduler-0\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.203072 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-config-data\") pod \"nova-scheduler-0\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.210669 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-config-data\") pod \"nova-scheduler-0\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.225364 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " pod="openstack/nova-scheduler-0"
Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.231448 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdf5z\" (UniqueName:
\"kubernetes.io/projected/c0cab82e-384f-4a9d-bf43-a72af2f53e79-kube-api-access-gdf5z\") pod \"nova-scheduler-0\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " pod="openstack/nova-scheduler-0" Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.353174 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.801910 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.969823 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.974280 4792 generic.go:334] "Generic (PLEG): container finished" podID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerID="736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68" exitCode=0 Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.974327 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.974364 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"601d6005-0364-43d4-89bc-ecdc60ef2bd9","Type":"ContainerDied","Data":"736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68"} Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.974399 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"601d6005-0364-43d4-89bc-ecdc60ef2bd9","Type":"ContainerDied","Data":"79da213cd71b02b8425d137d735397d0903efbf51d9bf60e5930746557568975"} Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.974417 4792 scope.go:117] "RemoveContainer" containerID="736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68" Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.978319 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91353c86-67a7-4198-9391-52f85954178b","Type":"ContainerStarted","Data":"ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34"} Sep 29 19:18:31 crc kubenswrapper[4792]: I0929 19:18:31.979842 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c0cab82e-384f-4a9d-bf43-a72af2f53e79","Type":"ContainerStarted","Data":"c2de6d5b6fbaa344919a8f84c6987fe2b14b4892b9ac0b6e85bf017fee6b8012"} Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.020255 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-combined-ca-bundle\") pod \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.020323 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/601d6005-0364-43d4-89bc-ecdc60ef2bd9-logs\") pod \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.020387 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pk6dc\" (UniqueName: \"kubernetes.io/projected/601d6005-0364-43d4-89bc-ecdc60ef2bd9-kube-api-access-pk6dc\") pod \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") " Sep 
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.020497 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-config-data\") pod \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\" (UID: \"601d6005-0364-43d4-89bc-ecdc60ef2bd9\") "
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.021057 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/601d6005-0364-43d4-89bc-ecdc60ef2bd9-logs" (OuterVolumeSpecName: "logs") pod "601d6005-0364-43d4-89bc-ecdc60ef2bd9" (UID: "601d6005-0364-43d4-89bc-ecdc60ef2bd9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.024475 4792 scope.go:117] "RemoveContainer" containerID="dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.034269 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/601d6005-0364-43d4-89bc-ecdc60ef2bd9-kube-api-access-pk6dc" (OuterVolumeSpecName: "kube-api-access-pk6dc") pod "601d6005-0364-43d4-89bc-ecdc60ef2bd9" (UID: "601d6005-0364-43d4-89bc-ecdc60ef2bd9"). InnerVolumeSpecName "kube-api-access-pk6dc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.066111 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "601d6005-0364-43d4-89bc-ecdc60ef2bd9" (UID: "601d6005-0364-43d4-89bc-ecdc60ef2bd9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.078247 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-config-data" (OuterVolumeSpecName: "config-data") pod "601d6005-0364-43d4-89bc-ecdc60ef2bd9" (UID: "601d6005-0364-43d4-89bc-ecdc60ef2bd9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.110624 4792 scope.go:117] "RemoveContainer" containerID="736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68"
Sep 29 19:18:32 crc kubenswrapper[4792]: E0929 19:18:32.111164 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68\": container with ID starting with 736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68 not found: ID does not exist" containerID="736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.111207 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68"} err="failed to get container status \"736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68\": rpc error: code = NotFound desc = could not find container \"736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68\": container with ID starting with 736641be11ff3754e43241afbc4740b975c411cfdfccb9f5cf6470f4573f8e68 not found: ID does not exist"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.111227 4792 scope.go:117] "RemoveContainer" containerID="dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf"
Sep 29 19:18:32 crc kubenswrapper[4792]: E0929 19:18:32.111556 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf\": container with ID starting with dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf not found: ID does not exist" containerID="dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.111580 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf"} err="failed to get container status \"dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf\": rpc error: code = NotFound desc = could not find container \"dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf\": container with ID starting with dbaa48e353014d4ed336ac537103e150b1e3ae0b71d7856a80eb43b5ad77eedf not found: ID does not exist"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.122227 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.122253 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/601d6005-0364-43d4-89bc-ecdc60ef2bd9-logs\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.122263 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pk6dc\" (UniqueName: \"kubernetes.io/projected/601d6005-0364-43d4-89bc-ecdc60ef2bd9-kube-api-access-pk6dc\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.122273 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/601d6005-0364-43d4-89bc-ecdc60ef2bd9-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.312593 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.319874 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.340385 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:18:32 crc kubenswrapper[4792]: E0929 19:18:32.340765 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerName="nova-api-api"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.340783 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerName="nova-api-api"
Sep 29 19:18:32 crc kubenswrapper[4792]: E0929 19:18:32.340809 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerName="nova-api-log"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.340815 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerName="nova-api-log"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.341003 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerName="nova-api-log"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.341045 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" containerName="nova-api-api"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.342176 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.344971 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.353619 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.426958 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.427030 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-config-data\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.427130 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d563b15-07e2-4115-b1fa-d397717d62cd-logs\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.427450 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs8vc\" (UniqueName: \"kubernetes.io/projected/5d563b15-07e2-4115-b1fa-d397717d62cd-kube-api-access-cs8vc\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.528955 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs8vc\" (UniqueName: \"kubernetes.io/projected/5d563b15-07e2-4115-b1fa-d397717d62cd-kube-api-access-cs8vc\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.529278 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.529322 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-config-data\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.529400 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d563b15-07e2-4115-b1fa-d397717d62cd-logs\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.529893 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d563b15-07e2-4115-b1fa-d397717d62cd-logs\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.533565 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.533602 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-config-data\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.545808 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs8vc\" (UniqueName: \"kubernetes.io/projected/5d563b15-07e2-4115-b1fa-d397717d62cd-kube-api-access-cs8vc\") pod \"nova-api-0\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.657627 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 19:18:32 crc kubenswrapper[4792]: I0929 19:18:32.996004 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91353c86-67a7-4198-9391-52f85954178b","Type":"ContainerStarted","Data":"c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573"}
Sep 29 19:18:33 crc kubenswrapper[4792]: I0929 19:18:33.000019 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c0cab82e-384f-4a9d-bf43-a72af2f53e79","Type":"ContainerStarted","Data":"d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f"}
Sep 29 19:18:33 crc kubenswrapper[4792]: I0929 19:18:33.039292 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54" path="/var/lib/kubelet/pods/32dbf736-0a8a-4dfc-a3e3-ae36c3b2aa54/volumes"
Sep 29 19:18:33 crc kubenswrapper[4792]: I0929 19:18:33.040591 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="601d6005-0364-43d4-89bc-ecdc60ef2bd9" path="/var/lib/kubelet/pods/601d6005-0364-43d4-89bc-ecdc60ef2bd9/volumes"
Sep 29 19:18:33 crc kubenswrapper[4792]: I0929 19:18:33.160404 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.160388404 podStartE2EDuration="3.160388404s" podCreationTimestamp="2025-09-29 19:18:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:33.042158522 +0000 UTC m=+1325.035465938" watchObservedRunningTime="2025-09-29 19:18:33.160388404 +0000 UTC m=+1325.153695800"
Sep 29 19:18:33 crc kubenswrapper[4792]: I0929 19:18:33.166359 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:18:34 crc kubenswrapper[4792]: I0929 19:18:34.010845 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91353c86-67a7-4198-9391-52f85954178b","Type":"ContainerStarted","Data":"499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40"}
Sep 29 19:18:34 crc kubenswrapper[4792]: I0929 19:18:34.013475 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5d563b15-07e2-4115-b1fa-d397717d62cd","Type":"ContainerStarted","Data":"66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357"}
Sep 29 19:18:34 crc kubenswrapper[4792]: I0929 19:18:34.013526 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5d563b15-07e2-4115-b1fa-d397717d62cd","Type":"ContainerStarted","Data":"9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df"}
Sep 29 19:18:34 crc kubenswrapper[4792]: I0929 19:18:34.013546 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5d563b15-07e2-4115-b1fa-d397717d62cd","Type":"ContainerStarted","Data":"792b9b5913edd2ada2f518fb6f9ba7b6a727deccde7ed3f1b29a6c795d3d6b2a"}
Sep 29 19:18:34 crc kubenswrapper[4792]: I0929 19:18:34.056256 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.056233589 podStartE2EDuration="2.056233589s" podCreationTimestamp="2025-09-29 19:18:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:34.041485886 +0000 UTC m=+1326.034793292" watchObservedRunningTime="2025-09-29 19:18:34.056233589 +0000 UTC m=+1326.049541005"
Sep 29 19:18:36 crc kubenswrapper[4792]: I0929 19:18:36.314394 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Sep 29 19:18:36 crc kubenswrapper[4792]: I0929 19:18:36.354027 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 29 19:18:36 crc kubenswrapper[4792]: I0929 19:18:36.568415 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Sep 29 19:18:37 crc kubenswrapper[4792]: I0929 19:18:37.057390 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91353c86-67a7-4198-9391-52f85954178b","Type":"ContainerStarted","Data":"15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e"}
Sep 29 19:18:37 crc kubenswrapper[4792]: I0929 19:18:37.057559 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 19:18:37 crc kubenswrapper[4792]: I0929 19:18:37.085372 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.545955551 podStartE2EDuration="8.085352097s" podCreationTimestamp="2025-09-29 19:18:29 +0000 UTC" firstStartedPulling="2025-09-29 19:18:30.902297108 +0000 UTC m=+1322.895604504" lastFinishedPulling="2025-09-29 19:18:36.441693654 +0000 UTC m=+1328.435001050" observedRunningTime="2025-09-29 19:18:37.076020744 +0000 UTC m=+1329.069328160" watchObservedRunningTime="2025-09-29 19:18:37.085352097 +0000 UTC m=+1329.078659493"
Sep 29 19:18:41 crc kubenswrapper[4792]: I0929 19:18:41.354486 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 29 19:18:41 crc kubenswrapper[4792]: I0929 19:18:41.383005 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 29 19:18:42 crc kubenswrapper[4792]: I0929 19:18:42.130954 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 29 19:18:42 crc kubenswrapper[4792]: I0929 19:18:42.658956 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
pod="openstack/nova-api-0" Sep 29 19:18:42 crc kubenswrapper[4792]: I0929 19:18:42.659012 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 19:18:43 crc kubenswrapper[4792]: I0929 19:18:43.742108 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 19:18:43 crc kubenswrapper[4792]: I0929 19:18:43.742266 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 19:18:48 crc kubenswrapper[4792]: E0929 19:18:48.693644 4792 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32dbf736_0a8a_4dfc_a3e3_ae36c3b2aa54.slice/crio-5ae2922fca1f8ffbbd0ad5e6328b2eaa58ce78524496bcd89144b93ad87ac513: Error finding container 5ae2922fca1f8ffbbd0ad5e6328b2eaa58ce78524496bcd89144b93ad87ac513: Status 404 returned error can't find the container with id 5ae2922fca1f8ffbbd0ad5e6328b2eaa58ce78524496bcd89144b93ad87ac513 Sep 29 19:18:48 crc kubenswrapper[4792]: E0929 19:18:48.960033 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd967b8db_0961_4897_a859_f1d46d30dba8.slice/crio-conmon-1081e48c669d84e83b06df9d9d775cc07869a324aa1810d929d32bac72816f3f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd967b8db_0961_4897_a859_f1d46d30dba8.slice/crio-1081e48c669d84e83b06df9d9d775cc07869a324aa1810d929d32bac72816f3f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98f4b269_3703_47ff_826a_f9f4f9ac5a98.slice/crio-conmon-b7dd0ce8ce59435410436b2e79bfbdcdeded61430c1bb76f80c2feaf8a3be826.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98f4b269_3703_47ff_826a_f9f4f9ac5a98.slice/crio-b7dd0ce8ce59435410436b2e79bfbdcdeded61430c1bb76f80c2feaf8a3be826.scope\": RecentStats: unable to find data in memory cache]" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.167432 4792 generic.go:334] "Generic (PLEG): container finished" podID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" containerID="b7dd0ce8ce59435410436b2e79bfbdcdeded61430c1bb76f80c2feaf8a3be826" exitCode=137 Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.167495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98f4b269-3703-47ff-826a-f9f4f9ac5a98","Type":"ContainerDied","Data":"b7dd0ce8ce59435410436b2e79bfbdcdeded61430c1bb76f80c2feaf8a3be826"} Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.167520 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98f4b269-3703-47ff-826a-f9f4f9ac5a98","Type":"ContainerDied","Data":"a82dfbcff16297f48390c2808c6f8ebc6e24e11cc9802a6753d7a349adbb22ab"} Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.167530 4792 pod_container_deletor.go:80] "Container not 
found in pod's containers" containerID="a82dfbcff16297f48390c2808c6f8ebc6e24e11cc9802a6753d7a349adbb22ab" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.168664 4792 generic.go:334] "Generic (PLEG): container finished" podID="d967b8db-0961-4897-a859-f1d46d30dba8" containerID="1081e48c669d84e83b06df9d9d775cc07869a324aa1810d929d32bac72816f3f" exitCode=137 Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.168686 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d967b8db-0961-4897-a859-f1d46d30dba8","Type":"ContainerDied","Data":"1081e48c669d84e83b06df9d9d775cc07869a324aa1810d929d32bac72816f3f"} Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.168701 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d967b8db-0961-4897-a859-f1d46d30dba8","Type":"ContainerDied","Data":"38a33ad11a6f9bc3f846aaae0c9c4ec1e7cb5595bb982db5349af63d0017b67c"} Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.168711 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38a33ad11a6f9bc3f846aaae0c9c4ec1e7cb5595bb982db5349af63d0017b67c" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.173386 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.181278 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.345363 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6pt2\" (UniqueName: \"kubernetes.io/projected/d967b8db-0961-4897-a859-f1d46d30dba8-kube-api-access-c6pt2\") pod \"d967b8db-0961-4897-a859-f1d46d30dba8\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.345397 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98f4b269-3703-47ff-826a-f9f4f9ac5a98-logs\") pod \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.345422 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-config-data\") pod \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.345458 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-config-data\") pod \"d967b8db-0961-4897-a859-f1d46d30dba8\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.345492 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-combined-ca-bundle\") pod \"d967b8db-0961-4897-a859-f1d46d30dba8\" (UID: \"d967b8db-0961-4897-a859-f1d46d30dba8\") " Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.345551 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddjzp\" (UniqueName: 
\"kubernetes.io/projected/98f4b269-3703-47ff-826a-f9f4f9ac5a98-kube-api-access-ddjzp\") pod \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.345660 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-combined-ca-bundle\") pod \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.345782 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98f4b269-3703-47ff-826a-f9f4f9ac5a98-logs" (OuterVolumeSpecName: "logs") pod "98f4b269-3703-47ff-826a-f9f4f9ac5a98" (UID: "98f4b269-3703-47ff-826a-f9f4f9ac5a98"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.346090 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98f4b269-3703-47ff-826a-f9f4f9ac5a98-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.352386 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d967b8db-0961-4897-a859-f1d46d30dba8-kube-api-access-c6pt2" (OuterVolumeSpecName: "kube-api-access-c6pt2") pod "d967b8db-0961-4897-a859-f1d46d30dba8" (UID: "d967b8db-0961-4897-a859-f1d46d30dba8"). InnerVolumeSpecName "kube-api-access-c6pt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.352719 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98f4b269-3703-47ff-826a-f9f4f9ac5a98-kube-api-access-ddjzp" (OuterVolumeSpecName: "kube-api-access-ddjzp") pod "98f4b269-3703-47ff-826a-f9f4f9ac5a98" (UID: "98f4b269-3703-47ff-826a-f9f4f9ac5a98"). InnerVolumeSpecName "kube-api-access-ddjzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:18:49 crc kubenswrapper[4792]: E0929 19:18:49.375410 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-combined-ca-bundle podName:98f4b269-3703-47ff-826a-f9f4f9ac5a98 nodeName:}" failed. No retries permitted until 2025-09-29 19:18:49.875383539 +0000 UTC m=+1341.868690935 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-combined-ca-bundle") pod "98f4b269-3703-47ff-826a-f9f4f9ac5a98" (UID: "98f4b269-3703-47ff-826a-f9f4f9ac5a98") : error deleting /var/lib/kubelet/pods/98f4b269-3703-47ff-826a-f9f4f9ac5a98/volume-subpaths: remove /var/lib/kubelet/pods/98f4b269-3703-47ff-826a-f9f4f9ac5a98/volume-subpaths: no such file or directory Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.378117 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-config-data" (OuterVolumeSpecName: "config-data") pod "98f4b269-3703-47ff-826a-f9f4f9ac5a98" (UID: "98f4b269-3703-47ff-826a-f9f4f9ac5a98"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.379553 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d967b8db-0961-4897-a859-f1d46d30dba8" (UID: "d967b8db-0961-4897-a859-f1d46d30dba8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.379957 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-config-data" (OuterVolumeSpecName: "config-data") pod "d967b8db-0961-4897-a859-f1d46d30dba8" (UID: "d967b8db-0961-4897-a859-f1d46d30dba8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.447860 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6pt2\" (UniqueName: \"kubernetes.io/projected/d967b8db-0961-4897-a859-f1d46d30dba8-kube-api-access-c6pt2\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.447894 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.447909 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.447920 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d967b8db-0961-4897-a859-f1d46d30dba8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.447931 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddjzp\" (UniqueName: \"kubernetes.io/projected/98f4b269-3703-47ff-826a-f9f4f9ac5a98-kube-api-access-ddjzp\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.958139 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-combined-ca-bundle\") pod \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\" (UID: \"98f4b269-3703-47ff-826a-f9f4f9ac5a98\") " Sep 29 19:18:49 crc kubenswrapper[4792]: I0929 19:18:49.961629 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98f4b269-3703-47ff-826a-f9f4f9ac5a98" (UID: "98f4b269-3703-47ff-826a-f9f4f9ac5a98"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.059830 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f4b269-3703-47ff-826a-f9f4f9ac5a98-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.175630 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.175648 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.213497 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.229081 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.256106 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:18:50 crc kubenswrapper[4792]: E0929 19:18:50.256483 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" containerName="nova-metadata-log" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.256501 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" containerName="nova-metadata-log" Sep 29 19:18:50 crc kubenswrapper[4792]: E0929 19:18:50.256530 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" containerName="nova-metadata-metadata" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.256539 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" containerName="nova-metadata-metadata" Sep 29 19:18:50 crc kubenswrapper[4792]: E0929 19:18:50.256551 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d967b8db-0961-4897-a859-f1d46d30dba8" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.256558 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d967b8db-0961-4897-a859-f1d46d30dba8" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.256755 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" containerName="nova-metadata-log" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.256781 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="d967b8db-0961-4897-a859-f1d46d30dba8" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.256796 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" containerName="nova-metadata-metadata" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.257336 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.257363 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.257434 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.281645 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.283414 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.285112 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.285383 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.323978 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.332897 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.340707 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.341365 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.354513 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.383010 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.383735 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7tcg\" (UniqueName: \"kubernetes.io/projected/84ede8b3-cba9-4133-bfce-14f44cba07b8-kube-api-access-m7tcg\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.383776 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.383811 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.383953 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.485900 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7tcg\" (UniqueName: \"kubernetes.io/projected/84ede8b3-cba9-4133-bfce-14f44cba07b8-kube-api-access-m7tcg\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.485962 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.486000 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.486055 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.486083 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w22p6\" (UniqueName: \"kubernetes.io/projected/303192e4-458c-4544-84a3-a49abf8a0be1-kube-api-access-w22p6\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.486115 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-config-data\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.488628 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.488731 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/303192e4-458c-4544-84a3-a49abf8a0be1-logs\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0" Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.488768 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 
19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.488814 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.490315 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.493255 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.493694 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.494652 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/84ede8b3-cba9-4133-bfce-14f44cba07b8-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.503898 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7tcg\" (UniqueName: \"kubernetes.io/projected/84ede8b3-cba9-4133-bfce-14f44cba07b8-kube-api-access-m7tcg\") pod \"nova-cell1-novncproxy-0\" (UID: \"84ede8b3-cba9-4133-bfce-14f44cba07b8\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.590594 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.590632 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w22p6\" (UniqueName: \"kubernetes.io/projected/303192e4-458c-4544-84a3-a49abf8a0be1-kube-api-access-w22p6\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.590661 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-config-data\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.590728 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/303192e4-458c-4544-84a3-a49abf8a0be1-logs\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.590770 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.592095 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/303192e4-458c-4544-84a3-a49abf8a0be1-logs\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.595105 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.597795 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.597943 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-config-data\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.598270 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.609985 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w22p6\" (UniqueName: \"kubernetes.io/projected/303192e4-458c-4544-84a3-a49abf8a0be1-kube-api-access-w22p6\") pod \"nova-metadata-0\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " pod="openstack/nova-metadata-0"
Sep 29 19:18:50 crc kubenswrapper[4792]: I0929 19:18:50.670646 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 19:18:51 crc kubenswrapper[4792]: I0929 19:18:51.027923 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98f4b269-3703-47ff-826a-f9f4f9ac5a98" path="/var/lib/kubelet/pods/98f4b269-3703-47ff-826a-f9f4f9ac5a98/volumes"
Sep 29 19:18:51 crc kubenswrapper[4792]: I0929 19:18:51.028781 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d967b8db-0961-4897-a859-f1d46d30dba8" path="/var/lib/kubelet/pods/d967b8db-0961-4897-a859-f1d46d30dba8/volumes"
Sep 29 19:18:51 crc kubenswrapper[4792]: I0929 19:18:51.064663 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 19:18:51 crc kubenswrapper[4792]: I0929 19:18:51.162244 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 19:18:51 crc kubenswrapper[4792]: I0929 19:18:51.210276 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"303192e4-458c-4544-84a3-a49abf8a0be1","Type":"ContainerStarted","Data":"6f189aafc58889ea9456cda863e99248b9a98193ca59cd471e60274974783f3a"}
Sep 29 19:18:51 crc kubenswrapper[4792]: I0929 19:18:51.217793 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"84ede8b3-cba9-4133-bfce-14f44cba07b8","Type":"ContainerStarted","Data":"23ef640db219779e77bf1a78d1ed19b1a285a0d77a2260e5a7c47ed8230ff22c"}
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.238563 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"303192e4-458c-4544-84a3-a49abf8a0be1","Type":"ContainerStarted","Data":"2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e"}
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.238999 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"303192e4-458c-4544-84a3-a49abf8a0be1","Type":"ContainerStarted","Data":"c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938"}
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.248000 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"84ede8b3-cba9-4133-bfce-14f44cba07b8","Type":"ContainerStarted","Data":"e213f77d4f9db5a08615288d5734c3614a7af1e5ae07143f9dcf5f9cebb73bfc"}
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.280256 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.280234559 podStartE2EDuration="2.280234559s" podCreationTimestamp="2025-09-29 19:18:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:52.259642064 +0000 UTC m=+1344.252949470" watchObservedRunningTime="2025-09-29 19:18:52.280234559 +0000 UTC m=+1344.273541955"
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.285950 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.285927967 podStartE2EDuration="2.285927967s" podCreationTimestamp="2025-09-29 19:18:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:52.282510468 +0000 UTC m=+1344.275817894" watchObservedRunningTime="2025-09-29 19:18:52.285927967 +0000 UTC m=+1344.279235383"
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.663838 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.663930 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.664288 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.664308 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.668179 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.668507 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.916564 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"]
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.918491 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:52 crc kubenswrapper[4792]: I0929 19:18:52.925102 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"]
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.043931 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.044044 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-config\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.044095 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.044131 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.044179 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.044210 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbgnx\" (UniqueName: \"kubernetes.io/projected/2c819d6e-de1e-4f6a-8135-ee279636481b-kube-api-access-bbgnx\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.145587 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.146017 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-config\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.146059 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.146082 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.146102 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.146123 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbgnx\" (UniqueName: \"kubernetes.io/projected/2c819d6e-de1e-4f6a-8135-ee279636481b-kube-api-access-bbgnx\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.146738 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.147240 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-config\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.147464 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.147796 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.148140 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.165582 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbgnx\" (UniqueName: \"kubernetes.io/projected/2c819d6e-de1e-4f6a-8135-ee279636481b-kube-api-access-bbgnx\") pod \"dnsmasq-dns-cd5cbd7b9-jgj5c\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.240695 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:53 crc kubenswrapper[4792]: I0929 19:18:53.769862 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"]
Sep 29 19:18:54 crc kubenswrapper[4792]: I0929 19:18:54.265447 4792 generic.go:334] "Generic (PLEG): container finished" podID="2c819d6e-de1e-4f6a-8135-ee279636481b" containerID="400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603" exitCode=0
Sep 29 19:18:54 crc kubenswrapper[4792]: I0929 19:18:54.265488 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c" event={"ID":"2c819d6e-de1e-4f6a-8135-ee279636481b","Type":"ContainerDied","Data":"400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603"}
Sep 29 19:18:54 crc kubenswrapper[4792]: I0929 19:18:54.265837 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c" event={"ID":"2c819d6e-de1e-4f6a-8135-ee279636481b","Type":"ContainerStarted","Data":"ec722ac38462fff60007ed89f917d5e2c786ef1c5dae2f455cf1fb9a25ce04ca"}
Sep 29 19:18:54 crc kubenswrapper[4792]: I0929 19:18:54.973601 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:18:54 crc kubenswrapper[4792]: I0929 19:18:54.974140 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="ceilometer-central-agent" containerID="cri-o://ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34" gracePeriod=30
Sep 29 19:18:54 crc kubenswrapper[4792]: I0929 19:18:54.974282 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="sg-core" containerID="cri-o://499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40" gracePeriod=30
Sep 29 19:18:54 crc kubenswrapper[4792]: I0929 19:18:54.974279 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="ceilometer-notification-agent" containerID="cri-o://c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573" gracePeriod=30
Sep 29 19:18:54 crc kubenswrapper[4792]: I0929 19:18:54.974544 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="proxy-httpd" containerID="cri-o://15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e" gracePeriod=30
Sep 29 19:18:54 crc kubenswrapper[4792]: I0929 19:18:54.987973 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.195:3000/\": EOF"
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.278314 4792 generic.go:334] "Generic (PLEG): container finished" podID="91353c86-67a7-4198-9391-52f85954178b" containerID="15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e" exitCode=0
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.278353 4792 generic.go:334] "Generic (PLEG): container finished" podID="91353c86-67a7-4198-9391-52f85954178b" containerID="499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40" exitCode=2
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.278364 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91353c86-67a7-4198-9391-52f85954178b","Type":"ContainerDied","Data":"15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e"}
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.278451 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91353c86-67a7-4198-9391-52f85954178b","Type":"ContainerDied","Data":"499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40"}
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.282532 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c" event={"ID":"2c819d6e-de1e-4f6a-8135-ee279636481b","Type":"ContainerStarted","Data":"ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18"}
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.282717 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.307507 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c" podStartSLOduration=3.307489249 podStartE2EDuration="3.307489249s" podCreationTimestamp="2025-09-29 19:18:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:55.305727353 +0000 UTC m=+1347.299034769" watchObservedRunningTime="2025-09-29 19:18:55.307489249 +0000 UTC m=+1347.300796635"
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.396322 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.396543 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerName="nova-api-log" containerID="cri-o://9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df" gracePeriod=30
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.396709 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerName="nova-api-api" containerID="cri-o://66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357" gracePeriod=30
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.599118 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.670750 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 19:18:55 crc kubenswrapper[4792]: I0929 19:18:55.670794 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 19:18:56 crc kubenswrapper[4792]: I0929 19:18:56.306767 4792 generic.go:334] "Generic (PLEG): container finished" podID="91353c86-67a7-4198-9391-52f85954178b" containerID="ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34" exitCode=0
Sep 29 19:18:56 crc kubenswrapper[4792]: I0929 19:18:56.307153 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91353c86-67a7-4198-9391-52f85954178b","Type":"ContainerDied","Data":"ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34"}
Sep 29 19:18:56 crc kubenswrapper[4792]: I0929 19:18:56.316472 4792 generic.go:334] "Generic (PLEG): container finished" podID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerID="9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df" exitCode=143
Sep 29 19:18:56 crc kubenswrapper[4792]: I0929 19:18:56.316592 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5d563b15-07e2-4115-b1fa-d397717d62cd","Type":"ContainerDied","Data":"9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df"}
Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.046703 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.144016 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-ceilometer-tls-certs\") pod \"91353c86-67a7-4198-9391-52f85954178b\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.144081 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-scripts\") pod \"91353c86-67a7-4198-9391-52f85954178b\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.144138 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-combined-ca-bundle\") pod \"91353c86-67a7-4198-9391-52f85954178b\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.144204 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-config-data\") pod \"91353c86-67a7-4198-9391-52f85954178b\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.144243 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-run-httpd\") pod \"91353c86-67a7-4198-9391-52f85954178b\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.144263 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-sg-core-conf-yaml\") pod \"91353c86-67a7-4198-9391-52f85954178b\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.144326 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmxmh\" (UniqueName: \"kubernetes.io/projected/91353c86-67a7-4198-9391-52f85954178b-kube-api-access-dmxmh\") pod \"91353c86-67a7-4198-9391-52f85954178b\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.144398 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-log-httpd\") pod \"91353c86-67a7-4198-9391-52f85954178b\" (UID: \"91353c86-67a7-4198-9391-52f85954178b\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.145230 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "91353c86-67a7-4198-9391-52f85954178b" (UID: "91353c86-67a7-4198-9391-52f85954178b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.147265 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "91353c86-67a7-4198-9391-52f85954178b" (UID: "91353c86-67a7-4198-9391-52f85954178b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.157289 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-scripts" (OuterVolumeSpecName: "scripts") pod "91353c86-67a7-4198-9391-52f85954178b" (UID: "91353c86-67a7-4198-9391-52f85954178b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.168012 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91353c86-67a7-4198-9391-52f85954178b-kube-api-access-dmxmh" (OuterVolumeSpecName: "kube-api-access-dmxmh") pod "91353c86-67a7-4198-9391-52f85954178b" (UID: "91353c86-67a7-4198-9391-52f85954178b"). InnerVolumeSpecName "kube-api-access-dmxmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.204571 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "91353c86-67a7-4198-9391-52f85954178b" (UID: "91353c86-67a7-4198-9391-52f85954178b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.247639 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.247665 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.247676 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmxmh\" (UniqueName: \"kubernetes.io/projected/91353c86-67a7-4198-9391-52f85954178b-kube-api-access-dmxmh\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.247684 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91353c86-67a7-4198-9391-52f85954178b-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.247692 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.253145 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.270000 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "91353c86-67a7-4198-9391-52f85954178b" (UID: "91353c86-67a7-4198-9391-52f85954178b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.292938 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91353c86-67a7-4198-9391-52f85954178b" (UID: "91353c86-67a7-4198-9391-52f85954178b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.336070 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-config-data" (OuterVolumeSpecName: "config-data") pod "91353c86-67a7-4198-9391-52f85954178b" (UID: "91353c86-67a7-4198-9391-52f85954178b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.349193 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-config-data\") pod \"5d563b15-07e2-4115-b1fa-d397717d62cd\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.349438 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-combined-ca-bundle\") pod \"5d563b15-07e2-4115-b1fa-d397717d62cd\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.349506 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cs8vc\" (UniqueName: \"kubernetes.io/projected/5d563b15-07e2-4115-b1fa-d397717d62cd-kube-api-access-cs8vc\") pod \"5d563b15-07e2-4115-b1fa-d397717d62cd\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.349616 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d563b15-07e2-4115-b1fa-d397717d62cd-logs\") pod \"5d563b15-07e2-4115-b1fa-d397717d62cd\" (UID: \"5d563b15-07e2-4115-b1fa-d397717d62cd\") " Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.350320 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.350339 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.350347 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d563b15-07e2-4115-b1fa-d397717d62cd-logs" (OuterVolumeSpecName: 
"logs") pod "5d563b15-07e2-4115-b1fa-d397717d62cd" (UID: "5d563b15-07e2-4115-b1fa-d397717d62cd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.350350 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91353c86-67a7-4198-9391-52f85954178b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.357218 4792 generic.go:334] "Generic (PLEG): container finished" podID="91353c86-67a7-4198-9391-52f85954178b" containerID="c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573" exitCode=0 Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.357296 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91353c86-67a7-4198-9391-52f85954178b","Type":"ContainerDied","Data":"c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573"} Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.357328 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91353c86-67a7-4198-9391-52f85954178b","Type":"ContainerDied","Data":"6da737f91740811b06392df0cb9a37afc3925a9fad4058839a43b2a1bc0d608c"} Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.357345 4792 scope.go:117] "RemoveContainer" containerID="15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.357469 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.361267 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d563b15-07e2-4115-b1fa-d397717d62cd-kube-api-access-cs8vc" (OuterVolumeSpecName: "kube-api-access-cs8vc") pod "5d563b15-07e2-4115-b1fa-d397717d62cd" (UID: "5d563b15-07e2-4115-b1fa-d397717d62cd"). InnerVolumeSpecName "kube-api-access-cs8vc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.363745 4792 generic.go:334] "Generic (PLEG): container finished" podID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerID="66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357" exitCode=0 Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.363784 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5d563b15-07e2-4115-b1fa-d397717d62cd","Type":"ContainerDied","Data":"66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357"} Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.363809 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5d563b15-07e2-4115-b1fa-d397717d62cd","Type":"ContainerDied","Data":"792b9b5913edd2ada2f518fb6f9ba7b6a727deccde7ed3f1b29a6c795d3d6b2a"} Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.363900 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.389666 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d563b15-07e2-4115-b1fa-d397717d62cd" (UID: "5d563b15-07e2-4115-b1fa-d397717d62cd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.393051 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-config-data" (OuterVolumeSpecName: "config-data") pod "5d563b15-07e2-4115-b1fa-d397717d62cd" (UID: "5d563b15-07e2-4115-b1fa-d397717d62cd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.406466 4792 scope.go:117] "RemoveContainer" containerID="499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.431725 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.451767 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d563b15-07e2-4115-b1fa-d397717d62cd-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.451798 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.451807 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d563b15-07e2-4115-b1fa-d397717d62cd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.451818 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cs8vc\" (UniqueName: \"kubernetes.io/projected/5d563b15-07e2-4115-b1fa-d397717d62cd-kube-api-access-cs8vc\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.457150 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.466055 4792 scope.go:117] "RemoveContainer" containerID="c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.471681 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.472160 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="ceilometer-notification-agent" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472180 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="ceilometer-notification-agent" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.472201 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="ceilometer-central-agent" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472209 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="ceilometer-central-agent" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.472218 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerName="nova-api-log" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472225 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" 
containerName="nova-api-log" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.472243 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="proxy-httpd" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472250 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="proxy-httpd" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.472261 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerName="nova-api-api" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472266 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerName="nova-api-api" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.472293 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="sg-core" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472299 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="sg-core" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472483 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="sg-core" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472494 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="ceilometer-central-agent" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472513 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerName="nova-api-log" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472522 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="proxy-httpd" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472534 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" containerName="nova-api-api" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.472546 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="91353c86-67a7-4198-9391-52f85954178b" containerName="ceilometer-notification-agent" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.479116 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.489823 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.490060 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.490189 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.511910 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.548100 4792 scope.go:117] "RemoveContainer" containerID="ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.555064 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-run-httpd\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.555483 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-log-httpd\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.555511 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.555564 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-config-data\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.555597 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-scripts\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.555647 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vvpc\" (UniqueName: \"kubernetes.io/projected/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-kube-api-access-4vvpc\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.555704 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 
19:18:59.555725 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.599978 4792 scope.go:117] "RemoveContainer" containerID="15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.604790 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e\": container with ID starting with 15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e not found: ID does not exist" containerID="15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.604838 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e"} err="failed to get container status \"15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e\": rpc error: code = NotFound desc = could not find container \"15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e\": container with ID starting with 15bfd0deb8a8a98c16ef7559560742f5fd68ac05144c5c97b768c09b64ef6f0e not found: ID does not exist" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.604881 4792 scope.go:117] "RemoveContainer" containerID="499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.605383 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40\": container with ID starting with 499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40 not found: ID does not exist" containerID="499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.605439 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40"} err="failed to get container status \"499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40\": rpc error: code = NotFound desc = could not find container \"499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40\": container with ID starting with 499214427f97d1a773a82d897fa75d48133af35219c8be27e1de5e4152297a40 not found: ID does not exist" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.605466 4792 scope.go:117] "RemoveContainer" containerID="c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.606954 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573\": container with ID starting with c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573 not found: ID does not exist" containerID="c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.606996 4792 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573"} err="failed to get container status \"c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573\": rpc error: code = NotFound desc = could not find container \"c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573\": container with ID starting with c49501a3bbf3d20523fabdfdf6904f4a91a81183311bff628cb251440721d573 not found: ID does not exist" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.607042 4792 scope.go:117] "RemoveContainer" containerID="ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.607456 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34\": container with ID starting with ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34 not found: ID does not exist" containerID="ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.607477 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34"} err="failed to get container status \"ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34\": rpc error: code = NotFound desc = could not find container \"ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34\": container with ID starting with ed532c4335acbfbb86be6f8c93512999da4dfc9f16d8f731b523448f192e7f34 not found: ID does not exist" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.607513 4792 scope.go:117] "RemoveContainer" containerID="66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.633532 4792 scope.go:117] "RemoveContainer" containerID="9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.662215 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-log-httpd\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.662253 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.662291 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-config-data\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.662321 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-scripts\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.662346 
4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vvpc\" (UniqueName: \"kubernetes.io/projected/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-kube-api-access-4vvpc\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.662383 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.662399 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.662433 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-run-httpd\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.662889 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-run-httpd\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.665088 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-log-httpd\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.665188 4792 scope.go:117] "RemoveContainer" containerID="66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.667053 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357\": container with ID starting with 66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357 not found: ID does not exist" containerID="66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.667098 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357"} err="failed to get container status \"66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357\": rpc error: code = NotFound desc = could not find container \"66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357\": container with ID starting with 66a2fb9afdf9489165d821300f81faadb832359e6f2c63753284156b550ac357 not found: ID does not exist" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.667124 4792 scope.go:117] "RemoveContainer" containerID="9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.673215 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: E0929 19:18:59.673306 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df\": container with ID starting with 9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df not found: ID does not exist" containerID="9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.673329 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df"} err="failed to get container status \"9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df\": rpc error: code = NotFound desc = could not find container \"9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df\": container with ID starting with 9554ad31091d048147a1361f625613950b33dd3cf15f712af4d78c09030ea0df not found: ID does not exist" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.673490 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-scripts\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.679263 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.689545 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.690338 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vvpc\" (UniqueName: \"kubernetes.io/projected/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-kube-api-access-4vvpc\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.709565 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4-config-data\") pod \"ceilometer-0\" (UID: \"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4\") " pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.772323 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.781001 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.800329 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 19:18:59 crc 
kubenswrapper[4792]: I0929 19:18:59.803558 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.806187 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.807318 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.807506 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.817646 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.864653 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.864706 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.864737 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjtnx\" (UniqueName: \"kubernetes.io/projected/c0164a3b-496a-40e7-a585-996878c0f653-kube-api-access-rjtnx\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.864771 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0164a3b-496a-40e7-a585-996878c0f653-logs\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.864834 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-config-data\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.864865 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-public-tls-certs\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.901270 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.965973 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-config-data\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.966013 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-public-tls-certs\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.966073 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.966107 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.966133 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjtnx\" (UniqueName: \"kubernetes.io/projected/c0164a3b-496a-40e7-a585-996878c0f653-kube-api-access-rjtnx\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.966161 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0164a3b-496a-40e7-a585-996878c0f653-logs\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.966512 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0164a3b-496a-40e7-a585-996878c0f653-logs\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.972587 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.974359 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-public-tls-certs\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.974417 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-config-data\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc 
kubenswrapper[4792]: I0929 19:18:59.974918 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:18:59 crc kubenswrapper[4792]: I0929 19:18:59.988990 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjtnx\" (UniqueName: \"kubernetes.io/projected/c0164a3b-496a-40e7-a585-996878c0f653-kube-api-access-rjtnx\") pod \"nova-api-0\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " pod="openstack/nova-api-0" Sep 29 19:19:00 crc kubenswrapper[4792]: I0929 19:19:00.166559 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:19:00 crc kubenswrapper[4792]: I0929 19:19:00.340870 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:19:00 crc kubenswrapper[4792]: I0929 19:19:00.376121 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4","Type":"ContainerStarted","Data":"fa5989b1a4109bc5e33f5ea0b0533e9696bcf1ae3bb7ba1170bf8671f21715fd"} Sep 29 19:19:00 crc kubenswrapper[4792]: I0929 19:19:00.599384 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:19:00 crc kubenswrapper[4792]: I0929 19:19:00.625112 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:19:00 crc kubenswrapper[4792]: I0929 19:19:00.651418 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:19:00 crc kubenswrapper[4792]: I0929 19:19:00.670884 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 19:19:00 crc kubenswrapper[4792]: I0929 19:19:00.671562 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.037568 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d563b15-07e2-4115-b1fa-d397717d62cd" path="/var/lib/kubelet/pods/5d563b15-07e2-4115-b1fa-d397717d62cd/volumes" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.038483 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91353c86-67a7-4198-9391-52f85954178b" path="/var/lib/kubelet/pods/91353c86-67a7-4198-9391-52f85954178b/volumes" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.405419 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4","Type":"ContainerStarted","Data":"f25e7b77f6444c24a19ad1286076823fca3281e3a74e169d29b65da57809310d"} Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.408975 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0164a3b-496a-40e7-a585-996878c0f653","Type":"ContainerStarted","Data":"827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8"} Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.409000 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0164a3b-496a-40e7-a585-996878c0f653","Type":"ContainerStarted","Data":"3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f"} Sep 29 19:19:01 
crc kubenswrapper[4792]: I0929 19:19:01.409011 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0164a3b-496a-40e7-a585-996878c0f653","Type":"ContainerStarted","Data":"1d1b44121f0893860c94d0f1024e394b3d6320a141eed16a81fae7a40af35bef"} Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.454173 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.470756 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.470739453 podStartE2EDuration="2.470739453s" podCreationTimestamp="2025-09-29 19:18:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:19:01.446428092 +0000 UTC m=+1353.439735488" watchObservedRunningTime="2025-09-29 19:19:01.470739453 +0000 UTC m=+1353.464046849" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.656988 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-w6z2z"] Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.659638 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.664978 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.665231 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.685095 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.685610 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.689282 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-w6z2z"] Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.817434 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.817490 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-scripts\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.817693 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f65wp\" (UniqueName: \"kubernetes.io/projected/94b5373f-d637-47ab-90eb-3d83e2a38886-kube-api-access-f65wp\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.817787 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-config-data\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.919747 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-config-data\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.919883 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.919913 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-scripts\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.919956 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f65wp\" (UniqueName: \"kubernetes.io/projected/94b5373f-d637-47ab-90eb-3d83e2a38886-kube-api-access-f65wp\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.927813 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-scripts\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.934643 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.938838 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-config-data\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:01 crc kubenswrapper[4792]: I0929 19:19:01.942101 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-f65wp\" (UniqueName: \"kubernetes.io/projected/94b5373f-d637-47ab-90eb-3d83e2a38886-kube-api-access-f65wp\") pod \"nova-cell1-cell-mapping-w6z2z\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:02 crc kubenswrapper[4792]: I0929 19:19:02.115997 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:02 crc kubenswrapper[4792]: I0929 19:19:02.430192 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4","Type":"ContainerStarted","Data":"6427227e0ba0448451a6d329b2e47cbc582a7d15736bd9e664d818a7080d56d2"} Sep 29 19:19:02 crc kubenswrapper[4792]: W0929 19:19:02.605109 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94b5373f_d637_47ab_90eb_3d83e2a38886.slice/crio-e31a08f8dc60558a0cbb173c3f3af22824758de492b9ed0acc249dbc24fd63b9 WatchSource:0}: Error finding container e31a08f8dc60558a0cbb173c3f3af22824758de492b9ed0acc249dbc24fd63b9: Status 404 returned error can't find the container with id e31a08f8dc60558a0cbb173c3f3af22824758de492b9ed0acc249dbc24fd63b9 Sep 29 19:19:02 crc kubenswrapper[4792]: I0929 19:19:02.608161 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-w6z2z"] Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.242866 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c" Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.298203 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-5gflj"] Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.298432 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bccf8f775-5gflj" podUID="6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" containerName="dnsmasq-dns" containerID="cri-o://595a3a55ac7fb226b96ff0d1ba205c24fa16c8065d0979c12837fbd99fabbee6" gracePeriod=10 Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.484435 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-w6z2z" event={"ID":"94b5373f-d637-47ab-90eb-3d83e2a38886","Type":"ContainerStarted","Data":"48dd98b540a3269fd78905a33f974f35a2c8eb8711921ab4ad616c802dbdd61e"} Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.484495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-w6z2z" event={"ID":"94b5373f-d637-47ab-90eb-3d83e2a38886","Type":"ContainerStarted","Data":"e31a08f8dc60558a0cbb173c3f3af22824758de492b9ed0acc249dbc24fd63b9"} Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.518823 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-w6z2z" podStartSLOduration=2.518795783 podStartE2EDuration="2.518795783s" podCreationTimestamp="2025-09-29 19:19:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:19:03.505362554 +0000 UTC m=+1355.498669960" watchObservedRunningTime="2025-09-29 19:19:03.518795783 +0000 UTC m=+1355.512103189" Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.542054 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4","Type":"ContainerStarted","Data":"7fee300c69e4362fb3c8053caa71db4a3d56dc520e03e5442997b17b58a22f81"} Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.571515 4792 generic.go:334] "Generic (PLEG): container finished" podID="6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" containerID="595a3a55ac7fb226b96ff0d1ba205c24fa16c8065d0979c12837fbd99fabbee6" exitCode=0 Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.571562 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-5gflj" event={"ID":"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789","Type":"ContainerDied","Data":"595a3a55ac7fb226b96ff0d1ba205c24fa16c8065d0979c12837fbd99fabbee6"} Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.891961 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-5gflj" Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.957685 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-svc\") pod \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.957757 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-nb\") pod \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.957968 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-swift-storage-0\") pod \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.958003 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-sb\") pod \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.958032 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgm54\" (UniqueName: \"kubernetes.io/projected/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-kube-api-access-rgm54\") pod \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.958071 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-config\") pod \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\" (UID: \"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789\") " Sep 29 19:19:03 crc kubenswrapper[4792]: I0929 19:19:03.981082 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-kube-api-access-rgm54" (OuterVolumeSpecName: "kube-api-access-rgm54") pod "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" (UID: "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789"). InnerVolumeSpecName "kube-api-access-rgm54". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.060591 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgm54\" (UniqueName: \"kubernetes.io/projected/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-kube-api-access-rgm54\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.084291 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" (UID: "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.097334 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" (UID: "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.113609 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-config" (OuterVolumeSpecName: "config") pod "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" (UID: "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.120269 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" (UID: "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.137102 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" (UID: "6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.162609 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.162639 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.162652 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.162662 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.162671 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.583090 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-5gflj" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.583096 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-5gflj" event={"ID":"6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789","Type":"ContainerDied","Data":"73ab2df25048104ac7df626c6b98743115e66f74c56d5931e7be512c914862a7"} Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.583489 4792 scope.go:117] "RemoveContainer" containerID="595a3a55ac7fb226b96ff0d1ba205c24fa16c8065d0979c12837fbd99fabbee6" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.605878 4792 scope.go:117] "RemoveContainer" containerID="9715e8ddda7b4e4778e7e2179fd1ce8a40c77edaccb014a951eedc0657ad8744" Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.616878 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-5gflj"] Sep 29 19:19:04 crc kubenswrapper[4792]: I0929 19:19:04.634192 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-5gflj"] Sep 29 19:19:05 crc kubenswrapper[4792]: I0929 19:19:05.030705 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" path="/var/lib/kubelet/pods/6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789/volumes" Sep 29 19:19:07 crc kubenswrapper[4792]: I0929 19:19:07.627098 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4","Type":"ContainerStarted","Data":"75c21d9f71f8184976bb964ccc2fef29161d2141ba1f8002e1091ac333ee43ea"} Sep 29 19:19:07 crc kubenswrapper[4792]: I0929 19:19:07.628872 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 19:19:07 crc kubenswrapper[4792]: I0929 19:19:07.652602 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9526083220000001 podStartE2EDuration="8.652587092s" podCreationTimestamp="2025-09-29 19:18:59 
+0000 UTC" firstStartedPulling="2025-09-29 19:19:00.358934738 +0000 UTC m=+1352.352242134" lastFinishedPulling="2025-09-29 19:19:07.058913508 +0000 UTC m=+1359.052220904" observedRunningTime="2025-09-29 19:19:07.648243339 +0000 UTC m=+1359.641550755" watchObservedRunningTime="2025-09-29 19:19:07.652587092 +0000 UTC m=+1359.645894488" Sep 29 19:19:08 crc kubenswrapper[4792]: I0929 19:19:08.644568 4792 generic.go:334] "Generic (PLEG): container finished" podID="94b5373f-d637-47ab-90eb-3d83e2a38886" containerID="48dd98b540a3269fd78905a33f974f35a2c8eb8711921ab4ad616c802dbdd61e" exitCode=0 Sep 29 19:19:08 crc kubenswrapper[4792]: I0929 19:19:08.644808 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-w6z2z" event={"ID":"94b5373f-d637-47ab-90eb-3d83e2a38886","Type":"ContainerDied","Data":"48dd98b540a3269fd78905a33f974f35a2c8eb8711921ab4ad616c802dbdd61e"} Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.034693 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.073446 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-combined-ca-bundle\") pod \"94b5373f-d637-47ab-90eb-3d83e2a38886\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.073498 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f65wp\" (UniqueName: \"kubernetes.io/projected/94b5373f-d637-47ab-90eb-3d83e2a38886-kube-api-access-f65wp\") pod \"94b5373f-d637-47ab-90eb-3d83e2a38886\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.073535 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-config-data\") pod \"94b5373f-d637-47ab-90eb-3d83e2a38886\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.073561 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-scripts\") pod \"94b5373f-d637-47ab-90eb-3d83e2a38886\" (UID: \"94b5373f-d637-47ab-90eb-3d83e2a38886\") " Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.093924 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94b5373f-d637-47ab-90eb-3d83e2a38886-kube-api-access-f65wp" (OuterVolumeSpecName: "kube-api-access-f65wp") pod "94b5373f-d637-47ab-90eb-3d83e2a38886" (UID: "94b5373f-d637-47ab-90eb-3d83e2a38886"). InnerVolumeSpecName "kube-api-access-f65wp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.096092 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-scripts" (OuterVolumeSpecName: "scripts") pod "94b5373f-d637-47ab-90eb-3d83e2a38886" (UID: "94b5373f-d637-47ab-90eb-3d83e2a38886"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.114985 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94b5373f-d637-47ab-90eb-3d83e2a38886" (UID: "94b5373f-d637-47ab-90eb-3d83e2a38886"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.122108 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-config-data" (OuterVolumeSpecName: "config-data") pod "94b5373f-d637-47ab-90eb-3d83e2a38886" (UID: "94b5373f-d637-47ab-90eb-3d83e2a38886"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.167361 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.167875 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.175756 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.175787 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f65wp\" (UniqueName: \"kubernetes.io/projected/94b5373f-d637-47ab-90eb-3d83e2a38886-kube-api-access-f65wp\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.175801 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.175812 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94b5373f-d637-47ab-90eb-3d83e2a38886-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.664113 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-w6z2z" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.664125 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-w6z2z" event={"ID":"94b5373f-d637-47ab-90eb-3d83e2a38886","Type":"ContainerDied","Data":"e31a08f8dc60558a0cbb173c3f3af22824758de492b9ed0acc249dbc24fd63b9"} Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.664175 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e31a08f8dc60558a0cbb173c3f3af22824758de492b9ed0acc249dbc24fd63b9" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.683430 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.704033 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.709972 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.866367 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.896276 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.896482 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c0cab82e-384f-4a9d-bf43-a72af2f53e79" containerName="nova-scheduler-scheduler" containerID="cri-o://d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f" gracePeriod=30 Sep 29 19:19:10 crc kubenswrapper[4792]: I0929 19:19:10.955485 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:19:11 crc kubenswrapper[4792]: I0929 19:19:11.180029 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c0164a3b-496a-40e7-a585-996878c0f653" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:19:11 crc kubenswrapper[4792]: I0929 19:19:11.180293 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c0164a3b-496a-40e7-a585-996878c0f653" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:19:11 crc kubenswrapper[4792]: E0929 19:19:11.370506 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 19:19:11 crc kubenswrapper[4792]: E0929 19:19:11.372441 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 19:19:11 crc kubenswrapper[4792]: E0929 19:19:11.373985 4792 log.go:32] "ExecSync cmd from runtime service failed" 
err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 19:19:11 crc kubenswrapper[4792]: E0929 19:19:11.374054 4792 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c0cab82e-384f-4a9d-bf43-a72af2f53e79" containerName="nova-scheduler-scheduler" Sep 29 19:19:11 crc kubenswrapper[4792]: I0929 19:19:11.670762 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c0164a3b-496a-40e7-a585-996878c0f653" containerName="nova-api-log" containerID="cri-o://3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f" gracePeriod=30 Sep 29 19:19:11 crc kubenswrapper[4792]: I0929 19:19:11.670885 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c0164a3b-496a-40e7-a585-996878c0f653" containerName="nova-api-api" containerID="cri-o://827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8" gracePeriod=30 Sep 29 19:19:11 crc kubenswrapper[4792]: I0929 19:19:11.680352 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 19:19:12 crc kubenswrapper[4792]: I0929 19:19:12.681175 4792 generic.go:334] "Generic (PLEG): container finished" podID="c0164a3b-496a-40e7-a585-996878c0f653" containerID="3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f" exitCode=143 Sep 29 19:19:12 crc kubenswrapper[4792]: I0929 19:19:12.681229 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0164a3b-496a-40e7-a585-996878c0f653","Type":"ContainerDied","Data":"3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f"} Sep 29 19:19:12 crc kubenswrapper[4792]: I0929 19:19:12.681424 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-log" containerID="cri-o://c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938" gracePeriod=30 Sep 29 19:19:12 crc kubenswrapper[4792]: I0929 19:19:12.681487 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-metadata" containerID="cri-o://2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e" gracePeriod=30 Sep 29 19:19:13 crc kubenswrapper[4792]: I0929 19:19:13.693524 4792 generic.go:334] "Generic (PLEG): container finished" podID="303192e4-458c-4544-84a3-a49abf8a0be1" containerID="c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938" exitCode=143 Sep 29 19:19:13 crc kubenswrapper[4792]: I0929 19:19:13.693710 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"303192e4-458c-4544-84a3-a49abf8a0be1","Type":"ContainerDied","Data":"c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938"} Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.572509 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.694691 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-config-data\") pod \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.696573 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-combined-ca-bundle\") pod \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.696742 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdf5z\" (UniqueName: \"kubernetes.io/projected/c0cab82e-384f-4a9d-bf43-a72af2f53e79-kube-api-access-gdf5z\") pod \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\" (UID: \"c0cab82e-384f-4a9d-bf43-a72af2f53e79\") " Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.702243 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0cab82e-384f-4a9d-bf43-a72af2f53e79-kube-api-access-gdf5z" (OuterVolumeSpecName: "kube-api-access-gdf5z") pod "c0cab82e-384f-4a9d-bf43-a72af2f53e79" (UID: "c0cab82e-384f-4a9d-bf43-a72af2f53e79"). InnerVolumeSpecName "kube-api-access-gdf5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.732121 4792 generic.go:334] "Generic (PLEG): container finished" podID="c0cab82e-384f-4a9d-bf43-a72af2f53e79" containerID="d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f" exitCode=0 Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.732175 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c0cab82e-384f-4a9d-bf43-a72af2f53e79","Type":"ContainerDied","Data":"d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f"} Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.732207 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c0cab82e-384f-4a9d-bf43-a72af2f53e79","Type":"ContainerDied","Data":"c2de6d5b6fbaa344919a8f84c6987fe2b14b4892b9ac0b6e85bf017fee6b8012"} Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.732228 4792 scope.go:117] "RemoveContainer" containerID="d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.732403 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.738786 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-config-data" (OuterVolumeSpecName: "config-data") pod "c0cab82e-384f-4a9d-bf43-a72af2f53e79" (UID: "c0cab82e-384f-4a9d-bf43-a72af2f53e79"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.762070 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0cab82e-384f-4a9d-bf43-a72af2f53e79" (UID: "c0cab82e-384f-4a9d-bf43-a72af2f53e79"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.798542 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.798585 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdf5z\" (UniqueName: \"kubernetes.io/projected/c0cab82e-384f-4a9d-bf43-a72af2f53e79-kube-api-access-gdf5z\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.798597 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cab82e-384f-4a9d-bf43-a72af2f53e79-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.822691 4792 scope.go:117] "RemoveContainer" containerID="d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f" Sep 29 19:19:15 crc kubenswrapper[4792]: E0929 19:19:15.823245 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f\": container with ID starting with d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f not found: ID does not exist" containerID="d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.823276 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f"} err="failed to get container status \"d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f\": rpc error: code = NotFound desc = could not find container \"d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f\": container with ID starting with d3df929e2eb7f3131e1c1a1f54de0eee7f53e1a2366eb322351f8913f816143f not found: ID does not exist" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.823583 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": read tcp 10.217.0.2:36134->10.217.0.199:8775: read: connection reset by peer" Sep 29 19:19:15 crc kubenswrapper[4792]: I0929 19:19:15.824013 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": read tcp 10.217.0.2:36136->10.217.0.199:8775: read: connection reset by peer" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.069124 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.080965 4792 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.108719 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:19:16 crc kubenswrapper[4792]: E0929 19:19:16.109333 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" containerName="init" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.109406 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" containerName="init" Sep 29 19:19:16 crc kubenswrapper[4792]: E0929 19:19:16.109485 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" containerName="dnsmasq-dns" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.109539 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" containerName="dnsmasq-dns" Sep 29 19:19:16 crc kubenswrapper[4792]: E0929 19:19:16.109601 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94b5373f-d637-47ab-90eb-3d83e2a38886" containerName="nova-manage" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.109654 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="94b5373f-d637-47ab-90eb-3d83e2a38886" containerName="nova-manage" Sep 29 19:19:16 crc kubenswrapper[4792]: E0929 19:19:16.110612 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0cab82e-384f-4a9d-bf43-a72af2f53e79" containerName="nova-scheduler-scheduler" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.110692 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0cab82e-384f-4a9d-bf43-a72af2f53e79" containerName="nova-scheduler-scheduler" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.111045 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0cab82e-384f-4a9d-bf43-a72af2f53e79" containerName="nova-scheduler-scheduler" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.111161 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="94b5373f-d637-47ab-90eb-3d83e2a38886" containerName="nova-manage" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.111275 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c32ebe8-d6f1-4ce5-be8e-42cadbdfe789" containerName="dnsmasq-dns" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.112163 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.116550 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.124025 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.208541 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t64q8\" (UniqueName: \"kubernetes.io/projected/38e4e8fe-a752-4d8c-aea2-07c6a92a7216-kube-api-access-t64q8\") pod \"nova-scheduler-0\" (UID: \"38e4e8fe-a752-4d8c-aea2-07c6a92a7216\") " pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.208665 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38e4e8fe-a752-4d8c-aea2-07c6a92a7216-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"38e4e8fe-a752-4d8c-aea2-07c6a92a7216\") " pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.208709 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38e4e8fe-a752-4d8c-aea2-07c6a92a7216-config-data\") pod \"nova-scheduler-0\" (UID: \"38e4e8fe-a752-4d8c-aea2-07c6a92a7216\") " pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.308750 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.310687 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t64q8\" (UniqueName: \"kubernetes.io/projected/38e4e8fe-a752-4d8c-aea2-07c6a92a7216-kube-api-access-t64q8\") pod \"nova-scheduler-0\" (UID: \"38e4e8fe-a752-4d8c-aea2-07c6a92a7216\") " pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.310900 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38e4e8fe-a752-4d8c-aea2-07c6a92a7216-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"38e4e8fe-a752-4d8c-aea2-07c6a92a7216\") " pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.311137 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38e4e8fe-a752-4d8c-aea2-07c6a92a7216-config-data\") pod \"nova-scheduler-0\" (UID: \"38e4e8fe-a752-4d8c-aea2-07c6a92a7216\") " pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.328514 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38e4e8fe-a752-4d8c-aea2-07c6a92a7216-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"38e4e8fe-a752-4d8c-aea2-07c6a92a7216\") " pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.337878 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38e4e8fe-a752-4d8c-aea2-07c6a92a7216-config-data\") pod \"nova-scheduler-0\" (UID: \"38e4e8fe-a752-4d8c-aea2-07c6a92a7216\") " pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc 
kubenswrapper[4792]: I0929 19:19:16.363186 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t64q8\" (UniqueName: \"kubernetes.io/projected/38e4e8fe-a752-4d8c-aea2-07c6a92a7216-kube-api-access-t64q8\") pod \"nova-scheduler-0\" (UID: \"38e4e8fe-a752-4d8c-aea2-07c6a92a7216\") " pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.412418 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/303192e4-458c-4544-84a3-a49abf8a0be1-logs\") pod \"303192e4-458c-4544-84a3-a49abf8a0be1\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.412541 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-combined-ca-bundle\") pod \"303192e4-458c-4544-84a3-a49abf8a0be1\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.412656 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w22p6\" (UniqueName: \"kubernetes.io/projected/303192e4-458c-4544-84a3-a49abf8a0be1-kube-api-access-w22p6\") pod \"303192e4-458c-4544-84a3-a49abf8a0be1\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.412679 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-nova-metadata-tls-certs\") pod \"303192e4-458c-4544-84a3-a49abf8a0be1\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.412727 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-config-data\") pod \"303192e4-458c-4544-84a3-a49abf8a0be1\" (UID: \"303192e4-458c-4544-84a3-a49abf8a0be1\") " Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.416370 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/303192e4-458c-4544-84a3-a49abf8a0be1-logs" (OuterVolumeSpecName: "logs") pod "303192e4-458c-4544-84a3-a49abf8a0be1" (UID: "303192e4-458c-4544-84a3-a49abf8a0be1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.451217 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/303192e4-458c-4544-84a3-a49abf8a0be1-kube-api-access-w22p6" (OuterVolumeSpecName: "kube-api-access-w22p6") pod "303192e4-458c-4544-84a3-a49abf8a0be1" (UID: "303192e4-458c-4544-84a3-a49abf8a0be1"). InnerVolumeSpecName "kube-api-access-w22p6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.451719 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.477818 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-config-data" (OuterVolumeSpecName: "config-data") pod "303192e4-458c-4544-84a3-a49abf8a0be1" (UID: "303192e4-458c-4544-84a3-a49abf8a0be1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.481095 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "303192e4-458c-4544-84a3-a49abf8a0be1" (UID: "303192e4-458c-4544-84a3-a49abf8a0be1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.513148 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "303192e4-458c-4544-84a3-a49abf8a0be1" (UID: "303192e4-458c-4544-84a3-a49abf8a0be1"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.515261 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.515293 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w22p6\" (UniqueName: \"kubernetes.io/projected/303192e4-458c-4544-84a3-a49abf8a0be1-kube-api-access-w22p6\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.515307 4792 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.515319 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/303192e4-458c-4544-84a3-a49abf8a0be1-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.515329 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/303192e4-458c-4544-84a3-a49abf8a0be1-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.743158 4792 generic.go:334] "Generic (PLEG): container finished" podID="303192e4-458c-4544-84a3-a49abf8a0be1" containerID="2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e" exitCode=0 Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.743229 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.743251 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"303192e4-458c-4544-84a3-a49abf8a0be1","Type":"ContainerDied","Data":"2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e"} Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.747134 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"303192e4-458c-4544-84a3-a49abf8a0be1","Type":"ContainerDied","Data":"6f189aafc58889ea9456cda863e99248b9a98193ca59cd471e60274974783f3a"} Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.747165 4792 scope.go:117] "RemoveContainer" containerID="2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.783949 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.791467 4792 scope.go:117] "RemoveContainer" containerID="c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.795879 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.810521 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:19:16 crc kubenswrapper[4792]: E0929 19:19:16.811006 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-metadata" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.811021 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-metadata" Sep 29 19:19:16 crc kubenswrapper[4792]: E0929 19:19:16.811064 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-log" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.811071 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-log" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.811252 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-metadata" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.811268 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" containerName="nova-metadata-log" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.812363 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.817701 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.817917 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.834252 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.849923 4792 scope.go:117] "RemoveContainer" containerID="2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e" Sep 29 19:19:16 crc kubenswrapper[4792]: E0929 19:19:16.850828 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e\": container with ID starting with 2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e not found: ID does not exist" containerID="2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.851047 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e"} err="failed to get container status \"2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e\": rpc error: code = NotFound desc = could not find container \"2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e\": container with ID starting with 2287d1ceee27214be3628b422ee71b94da8c72b9fea6e13053ba5b666bd18b7e not found: ID does not exist" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.851080 4792 scope.go:117] "RemoveContainer" containerID="c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938" Sep 29 19:19:16 crc kubenswrapper[4792]: E0929 19:19:16.855400 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938\": container with ID starting with c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938 not found: ID does not exist" containerID="c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.855526 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938"} err="failed to get container status \"c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938\": rpc error: code = NotFound desc = could not find container \"c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938\": container with ID starting with c15b0219fa9688ec0c1247f582223d9dacb8be3f2fb4182a965c45e38a251938 not found: ID does not exist" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.925545 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de3fc643-e567-4ae6-b446-f861b63822d7-logs\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.925603 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de3fc643-e567-4ae6-b446-f861b63822d7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.925645 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/de3fc643-e567-4ae6-b446-f861b63822d7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.925706 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29pg9\" (UniqueName: \"kubernetes.io/projected/de3fc643-e567-4ae6-b446-f861b63822d7-kube-api-access-29pg9\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.925801 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de3fc643-e567-4ae6-b446-f861b63822d7-config-data\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:16 crc kubenswrapper[4792]: I0929 19:19:16.928670 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:19:16 crc kubenswrapper[4792]: W0929 19:19:16.948866 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38e4e8fe_a752_4d8c_aea2_07c6a92a7216.slice/crio-b5994f6f3cd06e71b0881c09e22a830980234007ec633c93d562a178be743cb9 WatchSource:0}: Error finding container b5994f6f3cd06e71b0881c09e22a830980234007ec633c93d562a178be743cb9: Status 404 returned error can't find the container with id b5994f6f3cd06e71b0881c09e22a830980234007ec633c93d562a178be743cb9 Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.028519 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29pg9\" (UniqueName: \"kubernetes.io/projected/de3fc643-e567-4ae6-b446-f861b63822d7-kube-api-access-29pg9\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.028927 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de3fc643-e567-4ae6-b446-f861b63822d7-config-data\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.029183 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de3fc643-e567-4ae6-b446-f861b63822d7-logs\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.029230 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de3fc643-e567-4ae6-b446-f861b63822d7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc 
kubenswrapper[4792]: I0929 19:19:17.029255 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/de3fc643-e567-4ae6-b446-f861b63822d7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.029936 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de3fc643-e567-4ae6-b446-f861b63822d7-logs\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.031436 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="303192e4-458c-4544-84a3-a49abf8a0be1" path="/var/lib/kubelet/pods/303192e4-458c-4544-84a3-a49abf8a0be1/volumes" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.032191 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0cab82e-384f-4a9d-bf43-a72af2f53e79" path="/var/lib/kubelet/pods/c0cab82e-384f-4a9d-bf43-a72af2f53e79/volumes" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.034331 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de3fc643-e567-4ae6-b446-f861b63822d7-config-data\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.037376 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/de3fc643-e567-4ae6-b446-f861b63822d7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.040678 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de3fc643-e567-4ae6-b446-f861b63822d7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.048281 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29pg9\" (UniqueName: \"kubernetes.io/projected/de3fc643-e567-4ae6-b446-f861b63822d7-kube-api-access-29pg9\") pod \"nova-metadata-0\" (UID: \"de3fc643-e567-4ae6-b446-f861b63822d7\") " pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.135947 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.516844 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.645670 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-combined-ca-bundle\") pod \"c0164a3b-496a-40e7-a585-996878c0f653\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.645715 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-config-data\") pod \"c0164a3b-496a-40e7-a585-996878c0f653\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.645781 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjtnx\" (UniqueName: \"kubernetes.io/projected/c0164a3b-496a-40e7-a585-996878c0f653-kube-api-access-rjtnx\") pod \"c0164a3b-496a-40e7-a585-996878c0f653\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.645968 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-public-tls-certs\") pod \"c0164a3b-496a-40e7-a585-996878c0f653\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.646172 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0164a3b-496a-40e7-a585-996878c0f653-logs\") pod \"c0164a3b-496a-40e7-a585-996878c0f653\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.646222 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-internal-tls-certs\") pod \"c0164a3b-496a-40e7-a585-996878c0f653\" (UID: \"c0164a3b-496a-40e7-a585-996878c0f653\") " Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.646866 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0164a3b-496a-40e7-a585-996878c0f653-logs" (OuterVolumeSpecName: "logs") pod "c0164a3b-496a-40e7-a585-996878c0f653" (UID: "c0164a3b-496a-40e7-a585-996878c0f653"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.661575 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0164a3b-496a-40e7-a585-996878c0f653-kube-api-access-rjtnx" (OuterVolumeSpecName: "kube-api-access-rjtnx") pod "c0164a3b-496a-40e7-a585-996878c0f653" (UID: "c0164a3b-496a-40e7-a585-996878c0f653"). InnerVolumeSpecName "kube-api-access-rjtnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.688255 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-config-data" (OuterVolumeSpecName: "config-data") pod "c0164a3b-496a-40e7-a585-996878c0f653" (UID: "c0164a3b-496a-40e7-a585-996878c0f653"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.706083 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.728431 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c0164a3b-496a-40e7-a585-996878c0f653" (UID: "c0164a3b-496a-40e7-a585-996878c0f653"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.742124 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0164a3b-496a-40e7-a585-996878c0f653" (UID: "c0164a3b-496a-40e7-a585-996878c0f653"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.748690 4792 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.748729 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0164a3b-496a-40e7-a585-996878c0f653-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.748742 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.748758 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.748772 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjtnx\" (UniqueName: \"kubernetes.io/projected/c0164a3b-496a-40e7-a585-996878c0f653-kube-api-access-rjtnx\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.761120 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c0164a3b-496a-40e7-a585-996878c0f653" (UID: "c0164a3b-496a-40e7-a585-996878c0f653"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.766086 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"38e4e8fe-a752-4d8c-aea2-07c6a92a7216","Type":"ContainerStarted","Data":"0e84433564583333adf3bf80dc91ca1c9f64feff0e8961e5ae9c60f87e033aa8"} Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.766137 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"38e4e8fe-a752-4d8c-aea2-07c6a92a7216","Type":"ContainerStarted","Data":"b5994f6f3cd06e71b0881c09e22a830980234007ec633c93d562a178be743cb9"} Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.767590 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"de3fc643-e567-4ae6-b446-f861b63822d7","Type":"ContainerStarted","Data":"8d23c57d44ec766ac21e017baaa81bb76e3b1f0862705bb794aab7881db50f3f"} Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.770239 4792 generic.go:334] "Generic (PLEG): container finished" podID="c0164a3b-496a-40e7-a585-996878c0f653" containerID="827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8" exitCode=0 Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.770282 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0164a3b-496a-40e7-a585-996878c0f653","Type":"ContainerDied","Data":"827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8"} Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.770304 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0164a3b-496a-40e7-a585-996878c0f653","Type":"ContainerDied","Data":"1d1b44121f0893860c94d0f1024e394b3d6320a141eed16a81fae7a40af35bef"} Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.770326 4792 scope.go:117] "RemoveContainer" containerID="827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.770429 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.795416 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.795392188 podStartE2EDuration="1.795392188s" podCreationTimestamp="2025-09-29 19:19:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:19:17.784690998 +0000 UTC m=+1369.777998394" watchObservedRunningTime="2025-09-29 19:19:17.795392188 +0000 UTC m=+1369.788699594" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.820221 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.822675 4792 scope.go:117] "RemoveContainer" containerID="3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.837016 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.851183 4792 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0164a3b-496a-40e7-a585-996878c0f653-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.853842 4792 scope.go:117] "RemoveContainer" containerID="827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8" Sep 29 19:19:17 crc kubenswrapper[4792]: E0929 19:19:17.854383 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8\": container with ID starting with 827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8 not found: ID does not exist" containerID="827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.854420 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8"} err="failed to get container status \"827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8\": rpc error: code = NotFound desc = could not find container \"827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8\": container with ID starting with 827d009635bd0b92d62f8637bffad8a9920f2053e246ed145ba8d03576df4ba8 not found: ID does not exist" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.854446 4792 scope.go:117] "RemoveContainer" containerID="3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f" Sep 29 19:19:17 crc kubenswrapper[4792]: E0929 19:19:17.856195 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f\": container with ID starting with 3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f not found: ID does not exist" containerID="3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.856236 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f"} err="failed to get container status 
\"3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f\": rpc error: code = NotFound desc = could not find container \"3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f\": container with ID starting with 3f103c3f8d8c80c6a75a41373801aa3506108303214e4e2cdcd440e45327b72f not found: ID does not exist" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.856281 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 19:19:17 crc kubenswrapper[4792]: E0929 19:19:17.856787 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0164a3b-496a-40e7-a585-996878c0f653" containerName="nova-api-api" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.856814 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0164a3b-496a-40e7-a585-996878c0f653" containerName="nova-api-api" Sep 29 19:19:17 crc kubenswrapper[4792]: E0929 19:19:17.856988 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0164a3b-496a-40e7-a585-996878c0f653" containerName="nova-api-log" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.856998 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0164a3b-496a-40e7-a585-996878c0f653" containerName="nova-api-log" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.857254 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0164a3b-496a-40e7-a585-996878c0f653" containerName="nova-api-api" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.857287 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0164a3b-496a-40e7-a585-996878c0f653" containerName="nova-api-log" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.859884 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.863184 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.863460 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.863761 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.884430 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.954048 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-public-tls-certs\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.954163 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-config-data\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.954208 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " 
pod="openstack/nova-api-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.954234 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.954579 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70e6c218-833c-460f-a81f-e126902df64b-logs\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:17 crc kubenswrapper[4792]: I0929 19:19:17.954641 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6fx9\" (UniqueName: \"kubernetes.io/projected/70e6c218-833c-460f-a81f-e126902df64b-kube-api-access-k6fx9\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.056542 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70e6c218-833c-460f-a81f-e126902df64b-logs\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.056588 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6fx9\" (UniqueName: \"kubernetes.io/projected/70e6c218-833c-460f-a81f-e126902df64b-kube-api-access-k6fx9\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.056619 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-public-tls-certs\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.056726 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-config-data\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.056757 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.056781 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.058477 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70e6c218-833c-460f-a81f-e126902df64b-logs\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " 
pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.059883 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-public-tls-certs\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.061228 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.065254 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.065981 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70e6c218-833c-460f-a81f-e126902df64b-config-data\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.077017 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6fx9\" (UniqueName: \"kubernetes.io/projected/70e6c218-833c-460f-a81f-e126902df64b-kube-api-access-k6fx9\") pod \"nova-api-0\" (UID: \"70e6c218-833c-460f-a81f-e126902df64b\") " pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.194405 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.701450 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.787778 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"70e6c218-833c-460f-a81f-e126902df64b","Type":"ContainerStarted","Data":"bbb73ee9cae43956739e6f144731f32150a36b10261c0119705487978dd98e1d"} Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.794039 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"de3fc643-e567-4ae6-b446-f861b63822d7","Type":"ContainerStarted","Data":"c95728937b4b952b59182ba5b1f4ba76a3a659a7fe0312772d9b8195d656e04a"} Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.794072 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"de3fc643-e567-4ae6-b446-f861b63822d7","Type":"ContainerStarted","Data":"354e44a61f78562ab0fd13ea8b909bb7ac262102b314eb4e184e001fd41eafc5"} Sep 29 19:19:18 crc kubenswrapper[4792]: I0929 19:19:18.832558 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.832521162 podStartE2EDuration="2.832521162s" podCreationTimestamp="2025-09-29 19:19:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:19:18.816073662 +0000 UTC m=+1370.809381058" watchObservedRunningTime="2025-09-29 19:19:18.832521162 +0000 UTC m=+1370.825828558" Sep 29 19:19:19 crc kubenswrapper[4792]: I0929 19:19:19.031460 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0164a3b-496a-40e7-a585-996878c0f653" path="/var/lib/kubelet/pods/c0164a3b-496a-40e7-a585-996878c0f653/volumes" Sep 29 19:19:19 crc kubenswrapper[4792]: I0929 19:19:19.819868 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"70e6c218-833c-460f-a81f-e126902df64b","Type":"ContainerStarted","Data":"1db00dedae8e23cb766c6a11e618c05fc2bd33250f98e4b985635756f06c84c4"} Sep 29 19:19:19 crc kubenswrapper[4792]: I0929 19:19:19.820290 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"70e6c218-833c-460f-a81f-e126902df64b","Type":"ContainerStarted","Data":"d179e1244c6f3bfe85885382099115f5250e5ec098ffb57f5c84c4799cd32899"} Sep 29 19:19:21 crc kubenswrapper[4792]: I0929 19:19:21.452731 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 19:19:22 crc kubenswrapper[4792]: I0929 19:19:22.136969 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 19:19:22 crc kubenswrapper[4792]: I0929 19:19:22.137038 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 19:19:26 crc kubenswrapper[4792]: I0929 19:19:26.452637 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 19:19:26 crc kubenswrapper[4792]: I0929 19:19:26.477573 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 19:19:26 crc kubenswrapper[4792]: I0929 19:19:26.496586 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=9.496568757 
podStartE2EDuration="9.496568757s" podCreationTimestamp="2025-09-29 19:19:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:19:19.857139531 +0000 UTC m=+1371.850446947" watchObservedRunningTime="2025-09-29 19:19:26.496568757 +0000 UTC m=+1378.489876153" Sep 29 19:19:26 crc kubenswrapper[4792]: I0929 19:19:26.907251 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 19:19:27 crc kubenswrapper[4792]: I0929 19:19:27.136332 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 19:19:27 crc kubenswrapper[4792]: I0929 19:19:27.136402 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 19:19:28 crc kubenswrapper[4792]: I0929 19:19:28.149975 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="de3fc643-e567-4ae6-b446-f861b63822d7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:19:28 crc kubenswrapper[4792]: I0929 19:19:28.150022 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="de3fc643-e567-4ae6-b446-f861b63822d7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:19:28 crc kubenswrapper[4792]: I0929 19:19:28.194628 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 19:19:28 crc kubenswrapper[4792]: I0929 19:19:28.195086 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 19:19:29 crc kubenswrapper[4792]: I0929 19:19:29.207342 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="70e6c218-833c-460f-a81f-e126902df64b" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:19:29 crc kubenswrapper[4792]: I0929 19:19:29.207402 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="70e6c218-833c-460f-a81f-e126902df64b" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:19:29 crc kubenswrapper[4792]: I0929 19:19:29.912470 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 19:19:37 crc kubenswrapper[4792]: I0929 19:19:37.142929 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 19:19:37 crc kubenswrapper[4792]: I0929 19:19:37.143795 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 19:19:37 crc kubenswrapper[4792]: I0929 19:19:37.150276 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 19:19:37 crc kubenswrapper[4792]: I0929 19:19:37.152264 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 19:19:38 crc 
kubenswrapper[4792]: I0929 19:19:38.201257 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 19:19:38 crc kubenswrapper[4792]: I0929 19:19:38.202104 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 19:19:38 crc kubenswrapper[4792]: I0929 19:19:38.207293 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 19:19:38 crc kubenswrapper[4792]: I0929 19:19:38.209456 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 19:19:38 crc kubenswrapper[4792]: I0929 19:19:38.994588 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 19:19:39 crc kubenswrapper[4792]: I0929 19:19:39.001038 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 19:19:47 crc kubenswrapper[4792]: I0929 19:19:47.043294 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 19:19:48 crc kubenswrapper[4792]: I0929 19:19:48.030719 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 19:19:51 crc kubenswrapper[4792]: I0929 19:19:51.744991 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" containerName="rabbitmq" containerID="cri-o://512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5" gracePeriod=604796 Sep 29 19:19:52 crc kubenswrapper[4792]: I0929 19:19:52.831353 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" containerName="rabbitmq" containerID="cri-o://b526a1929b86be5299fbda31deae78e3be086c29bf1ddad0937a6ccfb383e8a0" gracePeriod=604796 Sep 29 19:19:55 crc kubenswrapper[4792]: I0929 19:19:55.803741 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Sep 29 19:19:56 crc kubenswrapper[4792]: I0929 19:19:56.211932 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.485821 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.643620 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-erlang-cookie\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.643717 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-config-data\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.643838 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-server-conf\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.643907 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.643930 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-plugins-conf\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.643953 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8mjt\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-kube-api-access-f8mjt\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.643983 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-pod-info\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.644052 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-erlang-cookie-secret\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.644094 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-tls\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.644125 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-confd\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: 
\"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.644175 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-plugins\") pod \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\" (UID: \"cf5405ae-97dd-404d-9b0c-4d0faaf961cb\") " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.648202 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.650396 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.654691 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.658667 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.659147 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-pod-info" (OuterVolumeSpecName: "pod-info") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.662406 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-kube-api-access-f8mjt" (OuterVolumeSpecName: "kube-api-access-f8mjt") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "kube-api-access-f8mjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.662467 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.669499 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.738191 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-config-data" (OuterVolumeSpecName: "config-data") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.749506 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.749637 4792 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.749715 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8mjt\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-kube-api-access-f8mjt\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.749776 4792 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.749845 4792 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.749929 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.749986 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.750052 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.750110 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.795193 4792 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Sep 
29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.806876 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-server-conf" (OuterVolumeSpecName: "server-conf") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.851497 4792 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.851531 4792 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.857064 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "cf5405ae-97dd-404d-9b0c-4d0faaf961cb" (UID: "cf5405ae-97dd-404d-9b0c-4d0faaf961cb"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:58 crc kubenswrapper[4792]: I0929 19:19:58.954168 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cf5405ae-97dd-404d-9b0c-4d0faaf961cb-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.203241 4792 generic.go:334] "Generic (PLEG): container finished" podID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" containerID="b526a1929b86be5299fbda31deae78e3be086c29bf1ddad0937a6ccfb383e8a0" exitCode=0 Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.203329 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"62bc84b7-9b21-447c-b1c3-21c4f178ba26","Type":"ContainerDied","Data":"b526a1929b86be5299fbda31deae78e3be086c29bf1ddad0937a6ccfb383e8a0"} Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.222912 4792 generic.go:334] "Generic (PLEG): container finished" podID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" containerID="512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5" exitCode=0 Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.222954 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cf5405ae-97dd-404d-9b0c-4d0faaf961cb","Type":"ContainerDied","Data":"512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5"} Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.222980 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cf5405ae-97dd-404d-9b0c-4d0faaf961cb","Type":"ContainerDied","Data":"868e208c8630b90861e90343b66e1785f37474c0424779d9f0f558c4c204da2d"} Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.222982 4792 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.222996 4792 scope.go:117] "RemoveContainer" containerID="512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.283069 4792 scope.go:117] "RemoveContainer" containerID="14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.288925 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.308268 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.328601 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 19:19:59 crc kubenswrapper[4792]: E0929 19:19:59.329255 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" containerName="setup-container"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.329271 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" containerName="setup-container"
Sep 29 19:19:59 crc kubenswrapper[4792]: E0929 19:19:59.329328 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" containerName="rabbitmq"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.329335 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" containerName="rabbitmq"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.329727 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" containerName="rabbitmq"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.336751 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.343341 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.343544 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.343648 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.343788 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.343915 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-dwb5k"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.344146 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.344181 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.357011 4792 scope.go:117] "RemoveContainer" containerID="512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5"
Sep 29 19:19:59 crc kubenswrapper[4792]: E0929 19:19:59.358270 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5\": container with ID starting with 512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5 not found: ID does not exist" containerID="512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.358304 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5"} err="failed to get container status \"512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5\": rpc error: code = NotFound desc = could not find container \"512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5\": container with ID starting with 512a2a6186a745534cc73538ff5a9222d0525e9cef117403944e068c5c4647f5 not found: ID does not exist"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.358328 4792 scope.go:117] "RemoveContainer" containerID="14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd"
Sep 29 19:19:59 crc kubenswrapper[4792]: E0929 19:19:59.359942 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd\": container with ID starting with 14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd not found: ID does not exist" containerID="14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.359969 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd"} err="failed to get container status \"14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd\": rpc error: code = NotFound desc = could not find container \"14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd\": container with ID starting with 14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd not found: ID does not exist"
\"14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd\": container with ID starting with 14e4cd7eecd90fb1cd3ec718807f43255f22d3aea6c40d065dfa59e4cfaa29fd not found: ID does not exist" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.386187 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.471969 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dd9e8433-9eac-49a2-bacd-7acb220b0efd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.472263 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd9e8433-9eac-49a2-bacd-7acb220b0efd-config-data\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.472313 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.472336 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.472353 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dd9e8433-9eac-49a2-bacd-7acb220b0efd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.472408 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dd9e8433-9eac-49a2-bacd-7acb220b0efd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.472428 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.472447 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.472469 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.472527 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dd9e8433-9eac-49a2-bacd-7acb220b0efd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.472544 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6qpt\" (UniqueName: \"kubernetes.io/projected/dd9e8433-9eac-49a2-bacd-7acb220b0efd-kube-api-access-s6qpt\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.575795 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.575856 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.575876 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dd9e8433-9eac-49a2-bacd-7acb220b0efd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.575933 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dd9e8433-9eac-49a2-bacd-7acb220b0efd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.575951 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.575972 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.575991 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0"
pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.576038 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dd9e8433-9eac-49a2-bacd-7acb220b0efd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.576052 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6qpt\" (UniqueName: \"kubernetes.io/projected/dd9e8433-9eac-49a2-bacd-7acb220b0efd-kube-api-access-s6qpt\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.576082 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dd9e8433-9eac-49a2-bacd-7acb220b0efd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.576097 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd9e8433-9eac-49a2-bacd-7acb220b0efd-config-data\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.576932 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd9e8433-9eac-49a2-bacd-7acb220b0efd-config-data\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.577188 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.577431 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.580873 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dd9e8433-9eac-49a2-bacd-7acb220b0efd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.581141 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.581496 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/dd9e8433-9eac-49a2-bacd-7acb220b0efd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.582200 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.584113 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dd9e8433-9eac-49a2-bacd-7acb220b0efd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.584582 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dd9e8433-9eac-49a2-bacd-7acb220b0efd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.589715 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dd9e8433-9eac-49a2-bacd-7acb220b0efd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.605025 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6qpt\" (UniqueName: \"kubernetes.io/projected/dd9e8433-9eac-49a2-bacd-7acb220b0efd-kube-api-access-s6qpt\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.633834 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"dd9e8433-9eac-49a2-bacd-7acb220b0efd\") " pod="openstack/rabbitmq-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.712379 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.723691 4792 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.881392 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/62bc84b7-9b21-447c-b1c3-21c4f178ba26-pod-info\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") "
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.881686 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/62bc84b7-9b21-447c-b1c3-21c4f178ba26-erlang-cookie-secret\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") "
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.881758 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-plugins-conf\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") "
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.881789 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-tls\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") "
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.881822 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-confd\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") "
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.881986 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-server-conf\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") "
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.882116 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wznl\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-kube-api-access-4wznl\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") "
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.882136 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-config-data\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") "
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.882266 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-erlang-cookie\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") "
Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.882301 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") "
\"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.882322 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-plugins\") pod \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\" (UID: \"62bc84b7-9b21-447c-b1c3-21c4f178ba26\") " Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.885107 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.887116 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.902207 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.902773 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.904615 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/62bc84b7-9b21-447c-b1c3-21c4f178ba26-pod-info" (OuterVolumeSpecName: "pod-info") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.905141 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.905228 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-kube-api-access-4wznl" (OuterVolumeSpecName: "kube-api-access-4wznl") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "kube-api-access-4wznl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.915856 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62bc84b7-9b21-447c-b1c3-21c4f178ba26-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.947717 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-config-data" (OuterVolumeSpecName: "config-data") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.989393 4792 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/62bc84b7-9b21-447c-b1c3-21c4f178ba26-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.989430 4792 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/62bc84b7-9b21-447c-b1c3-21c4f178ba26-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.989442 4792 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.989450 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.990996 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wznl\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-kube-api-access-4wznl\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.991006 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.991017 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.991062 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.991073 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:59 crc kubenswrapper[4792]: I0929 19:19:59.995010 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-server-conf" (OuterVolumeSpecName: "server-conf") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.024054 4792 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.037060 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "62bc84b7-9b21-447c-b1c3-21c4f178ba26" (UID: "62bc84b7-9b21-447c-b1c3-21c4f178ba26"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.092608 4792 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/62bc84b7-9b21-447c-b1c3-21c4f178ba26-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.092637 4792 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.092646 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/62bc84b7-9b21-447c-b1c3-21c4f178ba26-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.236197 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"62bc84b7-9b21-447c-b1c3-21c4f178ba26","Type":"ContainerDied","Data":"062e455f1a835d3c9c0ecb7089ad18d69a0b52c3c14dd5286ef2350dcb9c1c57"} Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.236241 4792 scope.go:117] "RemoveContainer" containerID="b526a1929b86be5299fbda31deae78e3be086c29bf1ddad0937a6ccfb383e8a0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.236320 4792 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.269249 4792 scope.go:117] "RemoveContainer" containerID="b824c6459e83975ad329f7367ab1eeb34ec3ddd56a5772f86a8492a07ef970d8"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.276721 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.326913 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.351837 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.392147 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 19:20:00 crc kubenswrapper[4792]: E0929 19:20:00.406010 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" containerName="rabbitmq"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.406246 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" containerName="rabbitmq"
Sep 29 19:20:00 crc kubenswrapper[4792]: E0929 19:20:00.406296 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" containerName="setup-container"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.406304 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" containerName="setup-container"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.406650 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" containerName="rabbitmq"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.407760 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.413599 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.413791 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-whhj6"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.413975 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.419744 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.419894 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.420207 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.420557 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.427297 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564636 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4e364c89-8b07-427c-a59a-c4576f98ddf2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564697 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564733 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564757 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4e364c89-8b07-427c-a59a-c4576f98ddf2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564775 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564821 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564857 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e364c89-8b07-427c-a59a-c4576f98ddf2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564872 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzt7w\" (UniqueName: \"kubernetes.io/projected/4e364c89-8b07-427c-a59a-c4576f98ddf2-kube-api-access-qzt7w\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564892 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564906 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4e364c89-8b07-427c-a59a-c4576f98ddf2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.564926 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4e364c89-8b07-427c-a59a-c4576f98ddf2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666346 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666424 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666456 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4e364c89-8b07-427c-a59a-c4576f98ddf2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666528 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666552 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e364c89-8b07-427c-a59a-c4576f98ddf2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666567 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzt7w\" (UniqueName: \"kubernetes.io/projected/4e364c89-8b07-427c-a59a-c4576f98ddf2-kube-api-access-qzt7w\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666587 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666604 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4e364c89-8b07-427c-a59a-c4576f98ddf2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666622 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4e364c89-8b07-427c-a59a-c4576f98ddf2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.666674 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4e364c89-8b07-427c-a59a-c4576f98ddf2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.667583 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4e364c89-8b07-427c-a59a-c4576f98ddf2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.667754 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") device 
mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.668041 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e364c89-8b07-427c-a59a-c4576f98ddf2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.668371 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.669886 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4e364c89-8b07-427c-a59a-c4576f98ddf2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.670695 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.675084 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.675573 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4e364c89-8b07-427c-a59a-c4576f98ddf2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.675957 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4e364c89-8b07-427c-a59a-c4576f98ddf2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.676227 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4e364c89-8b07-427c-a59a-c4576f98ddf2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.686462 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzt7w\" (UniqueName: \"kubernetes.io/projected/4e364c89-8b07-427c-a59a-c4576f98ddf2-kube-api-access-qzt7w\") pod \"rabbitmq-cell1-server-0\" (UID: \"4e364c89-8b07-427c-a59a-c4576f98ddf2\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.695879 4792 operation_generator.go:637] "MountVolume.SetUp 
Sep 29 19:20:00 crc kubenswrapper[4792]: I0929 19:20:00.802628 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 19:20:01 crc kubenswrapper[4792]: I0929 19:20:01.038215 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62bc84b7-9b21-447c-b1c3-21c4f178ba26" path="/var/lib/kubelet/pods/62bc84b7-9b21-447c-b1c3-21c4f178ba26/volumes"
Sep 29 19:20:01 crc kubenswrapper[4792]: I0929 19:20:01.041259 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf5405ae-97dd-404d-9b0c-4d0faaf961cb" path="/var/lib/kubelet/pods/cf5405ae-97dd-404d-9b0c-4d0faaf961cb/volumes"
Sep 29 19:20:01 crc kubenswrapper[4792]: I0929 19:20:01.248809 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dd9e8433-9eac-49a2-bacd-7acb220b0efd","Type":"ContainerStarted","Data":"1020885a420ebf33decb4eb4ec9127536b519a0483ee76cc36d12548a02b28f4"}
Sep 29 19:20:01 crc kubenswrapper[4792]: I0929 19:20:01.248885 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dd9e8433-9eac-49a2-bacd-7acb220b0efd","Type":"ContainerStarted","Data":"f297d415a395aa666e1727f8de0cac9518f6ab9ff2cd487d17f8982d73329531"}
Sep 29 19:20:01 crc kubenswrapper[4792]: I0929 19:20:01.333869 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 19:20:01 crc kubenswrapper[4792]: I0929 19:20:01.959576 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d558885bc-skcsq"]
Sep 29 19:20:01 crc kubenswrapper[4792]: I0929 19:20:01.961670 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:01 crc kubenswrapper[4792]: I0929 19:20:01.967198 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam"
Sep 29 19:20:01 crc kubenswrapper[4792]: I0929 19:20:01.995938 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-skcsq"]
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.097731 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.097815 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-config\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.097976 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.098017 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwzhb\" (UniqueName: \"kubernetes.io/projected/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-kube-api-access-vwzhb\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.098066 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.098107 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-svc\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.098184 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.199588 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
\"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.199636 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwzhb\" (UniqueName: \"kubernetes.io/projected/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-kube-api-access-vwzhb\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.200011 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.200639 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.200813 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.201373 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-svc\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.200870 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-svc\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.201971 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.202514 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.202696 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq" 
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.202764 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-config\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.203453 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-config\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.203682 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.232583 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwzhb\" (UniqueName: \"kubernetes.io/projected/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-kube-api-access-vwzhb\") pod \"dnsmasq-dns-d558885bc-skcsq\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.261295 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4e364c89-8b07-427c-a59a-c4576f98ddf2","Type":"ContainerStarted","Data":"5fdc2a1cf93c9ae4f28622fc359a167fc2b9ca22c5be2324ab1a28b364140a42"}
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.262188 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4e364c89-8b07-427c-a59a-c4576f98ddf2","Type":"ContainerStarted","Data":"5a5581fbdcfcb204046fbdf936a24f05a0de35917dcd246e3f8b4a5dccf51e93"}
Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.277252 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:02 crc kubenswrapper[4792]: I0929 19:20:02.844990 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-skcsq"] Sep 29 19:20:03 crc kubenswrapper[4792]: I0929 19:20:03.270275 4792 generic.go:334] "Generic (PLEG): container finished" podID="5c4ccfb1-8403-4880-b6af-254c2a9f7dac" containerID="851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c" exitCode=0 Sep 29 19:20:03 crc kubenswrapper[4792]: I0929 19:20:03.270343 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-skcsq" event={"ID":"5c4ccfb1-8403-4880-b6af-254c2a9f7dac","Type":"ContainerDied","Data":"851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c"} Sep 29 19:20:03 crc kubenswrapper[4792]: I0929 19:20:03.270621 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-skcsq" event={"ID":"5c4ccfb1-8403-4880-b6af-254c2a9f7dac","Type":"ContainerStarted","Data":"a9fdb47524028a5d4efe3a1369bf1e82f386727eb0c49aa4e6bbcf6f33d972c0"} Sep 29 19:20:04 crc kubenswrapper[4792]: I0929 19:20:04.282083 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-skcsq" event={"ID":"5c4ccfb1-8403-4880-b6af-254c2a9f7dac","Type":"ContainerStarted","Data":"4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a"} Sep 29 19:20:04 crc kubenswrapper[4792]: I0929 19:20:04.283273 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:04 crc kubenswrapper[4792]: I0929 19:20:04.310674 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d558885bc-skcsq" podStartSLOduration=3.310656485 podStartE2EDuration="3.310656485s" podCreationTimestamp="2025-09-29 19:20:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:20:04.305258594 +0000 UTC m=+1416.298565980" watchObservedRunningTime="2025-09-29 19:20:04.310656485 +0000 UTC m=+1416.303963881" Sep 29 19:20:11 crc kubenswrapper[4792]: I0929 19:20:11.960129 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:20:11 crc kubenswrapper[4792]: I0929 19:20:11.960521 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.279012 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d558885bc-skcsq" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.361361 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"] Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.361839 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c" podUID="2c819d6e-de1e-4f6a-8135-ee279636481b" containerName="dnsmasq-dns" 
containerID="cri-o://ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18" gracePeriod=10 Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.551161 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67cb876dc9-v2rp9"] Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.553253 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.581188 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67cb876dc9-v2rp9"] Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.617292 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-ovsdbserver-sb\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.617424 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-config\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.617444 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-dns-swift-storage-0\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.617463 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-openstack-edpm-ipam\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.617499 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-ovsdbserver-nb\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.617538 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6j24\" (UniqueName: \"kubernetes.io/projected/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-kube-api-access-n6j24\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.617580 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-dns-svc\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.719467 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-config\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.719505 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-dns-swift-storage-0\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.719528 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-openstack-edpm-ipam\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.719561 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-ovsdbserver-nb\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.719598 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6j24\" (UniqueName: \"kubernetes.io/projected/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-kube-api-access-n6j24\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.719634 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-dns-svc\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.719650 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-ovsdbserver-sb\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.720646 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-dns-svc\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.721480 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-ovsdbserver-sb\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.721746 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-ovsdbserver-nb\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.722976 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-openstack-edpm-ipam\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.723021 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-dns-swift-storage-0\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.723528 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-config\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.750184 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6j24\" (UniqueName: \"kubernetes.io/projected/6d1053e6-7c5c-4c2c-828d-c9241606b3e1-kube-api-access-n6j24\") pod \"dnsmasq-dns-67cb876dc9-v2rp9\" (UID: \"6d1053e6-7c5c-4c2c-828d-c9241606b3e1\") " pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:12 crc kubenswrapper[4792]: I0929 19:20:12.888562 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.000526 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.126288 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-nb\") pod \"2c819d6e-de1e-4f6a-8135-ee279636481b\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.126339 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-svc\") pod \"2c819d6e-de1e-4f6a-8135-ee279636481b\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.126418 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-sb\") pod \"2c819d6e-de1e-4f6a-8135-ee279636481b\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.126460 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-swift-storage-0\") pod \"2c819d6e-de1e-4f6a-8135-ee279636481b\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.126510 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbgnx\" (UniqueName: \"kubernetes.io/projected/2c819d6e-de1e-4f6a-8135-ee279636481b-kube-api-access-bbgnx\") pod \"2c819d6e-de1e-4f6a-8135-ee279636481b\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.126553 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-config\") pod \"2c819d6e-de1e-4f6a-8135-ee279636481b\" (UID: \"2c819d6e-de1e-4f6a-8135-ee279636481b\") " Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.159642 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c819d6e-de1e-4f6a-8135-ee279636481b-kube-api-access-bbgnx" (OuterVolumeSpecName: "kube-api-access-bbgnx") pod "2c819d6e-de1e-4f6a-8135-ee279636481b" (UID: "2c819d6e-de1e-4f6a-8135-ee279636481b"). InnerVolumeSpecName "kube-api-access-bbgnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.207881 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2c819d6e-de1e-4f6a-8135-ee279636481b" (UID: "2c819d6e-de1e-4f6a-8135-ee279636481b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.215238 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2c819d6e-de1e-4f6a-8135-ee279636481b" (UID: "2c819d6e-de1e-4f6a-8135-ee279636481b"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.223279 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-config" (OuterVolumeSpecName: "config") pod "2c819d6e-de1e-4f6a-8135-ee279636481b" (UID: "2c819d6e-de1e-4f6a-8135-ee279636481b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.229080 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.229110 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbgnx\" (UniqueName: \"kubernetes.io/projected/2c819d6e-de1e-4f6a-8135-ee279636481b-kube-api-access-bbgnx\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.229121 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.229131 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.240305 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c819d6e-de1e-4f6a-8135-ee279636481b" (UID: "2c819d6e-de1e-4f6a-8135-ee279636481b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.248759 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2c819d6e-de1e-4f6a-8135-ee279636481b" (UID: "2c819d6e-de1e-4f6a-8135-ee279636481b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.330528 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.330557 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c819d6e-de1e-4f6a-8135-ee279636481b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.375497 4792 generic.go:334] "Generic (PLEG): container finished" podID="2c819d6e-de1e-4f6a-8135-ee279636481b" containerID="ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18" exitCode=0 Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.375539 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c" event={"ID":"2c819d6e-de1e-4f6a-8135-ee279636481b","Type":"ContainerDied","Data":"ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18"} Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.375566 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c" event={"ID":"2c819d6e-de1e-4f6a-8135-ee279636481b","Type":"ContainerDied","Data":"ec722ac38462fff60007ed89f917d5e2c786ef1c5dae2f455cf1fb9a25ce04ca"} Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.375584 4792 scope.go:117] "RemoveContainer" containerID="ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.375708 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-jgj5c" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.418112 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"] Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.436212 4792 scope.go:117] "RemoveContainer" containerID="400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.442464 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-jgj5c"] Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.466622 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67cb876dc9-v2rp9"] Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.478161 4792 scope.go:117] "RemoveContainer" containerID="ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18" Sep 29 19:20:13 crc kubenswrapper[4792]: E0929 19:20:13.481416 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18\": container with ID starting with ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18 not found: ID does not exist" containerID="ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.481474 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18"} err="failed to get container status \"ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18\": rpc error: code = NotFound desc = could not find container 
\"ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18\": container with ID starting with ac93fb19563706192b5e969214a70894422756e19f89e1e0a014c49c69548f18 not found: ID does not exist" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.481509 4792 scope.go:117] "RemoveContainer" containerID="400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603" Sep 29 19:20:13 crc kubenswrapper[4792]: E0929 19:20:13.482986 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603\": container with ID starting with 400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603 not found: ID does not exist" containerID="400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.483031 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603"} err="failed to get container status \"400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603\": rpc error: code = NotFound desc = could not find container \"400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603\": container with ID starting with 400e0770911bb82b381bedb17f63a143e63b6f2f9098e21b968def592e658603 not found: ID does not exist" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.540893 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s6z9t"] Sep 29 19:20:13 crc kubenswrapper[4792]: E0929 19:20:13.541343 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c819d6e-de1e-4f6a-8135-ee279636481b" containerName="init" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.541359 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c819d6e-de1e-4f6a-8135-ee279636481b" containerName="init" Sep 29 19:20:13 crc kubenswrapper[4792]: E0929 19:20:13.541374 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c819d6e-de1e-4f6a-8135-ee279636481b" containerName="dnsmasq-dns" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.541383 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c819d6e-de1e-4f6a-8135-ee279636481b" containerName="dnsmasq-dns" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.541563 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c819d6e-de1e-4f6a-8135-ee279636481b" containerName="dnsmasq-dns" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.542951 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.589830 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s6z9t"] Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.640970 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-catalog-content\") pod \"community-operators-s6z9t\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.641064 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-utilities\") pod \"community-operators-s6z9t\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.641125 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq9jm\" (UniqueName: \"kubernetes.io/projected/16c021a5-5e9d-4b49-8952-0ab84e2f2428-kube-api-access-dq9jm\") pod \"community-operators-s6z9t\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.742844 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq9jm\" (UniqueName: \"kubernetes.io/projected/16c021a5-5e9d-4b49-8952-0ab84e2f2428-kube-api-access-dq9jm\") pod \"community-operators-s6z9t\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.742968 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-catalog-content\") pod \"community-operators-s6z9t\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.743043 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-utilities\") pod \"community-operators-s6z9t\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.743436 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-utilities\") pod \"community-operators-s6z9t\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.743969 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-catalog-content\") pod \"community-operators-s6z9t\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.771038 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dq9jm\" (UniqueName: \"kubernetes.io/projected/16c021a5-5e9d-4b49-8952-0ab84e2f2428-kube-api-access-dq9jm\") pod \"community-operators-s6z9t\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:13 crc kubenswrapper[4792]: I0929 19:20:13.947615 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:14 crc kubenswrapper[4792]: I0929 19:20:14.384740 4792 generic.go:334] "Generic (PLEG): container finished" podID="6d1053e6-7c5c-4c2c-828d-c9241606b3e1" containerID="e7901cfd3d8ae00645aa9a5195ab7d0959b097d778e357710d15466201343e0a" exitCode=0 Sep 29 19:20:14 crc kubenswrapper[4792]: I0929 19:20:14.384779 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" event={"ID":"6d1053e6-7c5c-4c2c-828d-c9241606b3e1","Type":"ContainerDied","Data":"e7901cfd3d8ae00645aa9a5195ab7d0959b097d778e357710d15466201343e0a"} Sep 29 19:20:14 crc kubenswrapper[4792]: I0929 19:20:14.385060 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" event={"ID":"6d1053e6-7c5c-4c2c-828d-c9241606b3e1","Type":"ContainerStarted","Data":"930a851d142bf885c489b3e2928ac4d8cd1716d694ca1cf7317c671647f952dc"} Sep 29 19:20:14 crc kubenswrapper[4792]: I0929 19:20:14.486282 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s6z9t"] Sep 29 19:20:15 crc kubenswrapper[4792]: I0929 19:20:15.030018 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c819d6e-de1e-4f6a-8135-ee279636481b" path="/var/lib/kubelet/pods/2c819d6e-de1e-4f6a-8135-ee279636481b/volumes" Sep 29 19:20:15 crc kubenswrapper[4792]: I0929 19:20:15.404797 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" event={"ID":"6d1053e6-7c5c-4c2c-828d-c9241606b3e1","Type":"ContainerStarted","Data":"a5ba74948791156508760c8c3a538acbf02c33023217ef2c5f5f7c61116a6385"} Sep 29 19:20:15 crc kubenswrapper[4792]: I0929 19:20:15.405315 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" Sep 29 19:20:15 crc kubenswrapper[4792]: I0929 19:20:15.408182 4792 generic.go:334] "Generic (PLEG): container finished" podID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerID="37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce" exitCode=0 Sep 29 19:20:15 crc kubenswrapper[4792]: I0929 19:20:15.408224 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s6z9t" event={"ID":"16c021a5-5e9d-4b49-8952-0ab84e2f2428","Type":"ContainerDied","Data":"37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce"} Sep 29 19:20:15 crc kubenswrapper[4792]: I0929 19:20:15.408250 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s6z9t" event={"ID":"16c021a5-5e9d-4b49-8952-0ab84e2f2428","Type":"ContainerStarted","Data":"44d5064759378d94a8c2773e10fdae81f73cf33ca2c010c2b407cb25658be4f1"} Sep 29 19:20:15 crc kubenswrapper[4792]: I0929 19:20:15.430114 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9" podStartSLOduration=3.430095736 podStartE2EDuration="3.430095736s" podCreationTimestamp="2025-09-29 19:20:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:20:15.423925705 +0000 UTC m=+1427.417233121" watchObservedRunningTime="2025-09-29 19:20:15.430095736 +0000 UTC m=+1427.423403132" Sep 29 19:20:16 crc kubenswrapper[4792]: I0929 19:20:16.419765 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s6z9t" event={"ID":"16c021a5-5e9d-4b49-8952-0ab84e2f2428","Type":"ContainerStarted","Data":"3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218"} Sep 29 19:20:16 crc kubenswrapper[4792]: I0929 19:20:16.851213 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bmpnh"] Sep 29 19:20:16 crc kubenswrapper[4792]: I0929 19:20:16.853730 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:16 crc kubenswrapper[4792]: I0929 19:20:16.861958 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bmpnh"] Sep 29 19:20:16 crc kubenswrapper[4792]: I0929 19:20:16.898824 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-utilities\") pod \"redhat-marketplace-bmpnh\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:16 crc kubenswrapper[4792]: I0929 19:20:16.899182 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgm7z\" (UniqueName: \"kubernetes.io/projected/4b2b8596-a247-4709-b5ad-a25b53886b52-kube-api-access-sgm7z\") pod \"redhat-marketplace-bmpnh\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:16 crc kubenswrapper[4792]: I0929 19:20:16.899346 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-catalog-content\") pod \"redhat-marketplace-bmpnh\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:17 crc kubenswrapper[4792]: I0929 19:20:17.000365 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-utilities\") pod \"redhat-marketplace-bmpnh\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:17 crc kubenswrapper[4792]: I0929 19:20:17.000435 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgm7z\" (UniqueName: \"kubernetes.io/projected/4b2b8596-a247-4709-b5ad-a25b53886b52-kube-api-access-sgm7z\") pod \"redhat-marketplace-bmpnh\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:17 crc kubenswrapper[4792]: I0929 19:20:17.000568 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-catalog-content\") pod \"redhat-marketplace-bmpnh\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:17 crc kubenswrapper[4792]: I0929 
19:20:17.000932 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-utilities\") pod \"redhat-marketplace-bmpnh\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:17 crc kubenswrapper[4792]: I0929 19:20:17.000952 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-catalog-content\") pod \"redhat-marketplace-bmpnh\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:17 crc kubenswrapper[4792]: I0929 19:20:17.019615 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgm7z\" (UniqueName: \"kubernetes.io/projected/4b2b8596-a247-4709-b5ad-a25b53886b52-kube-api-access-sgm7z\") pod \"redhat-marketplace-bmpnh\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:17 crc kubenswrapper[4792]: I0929 19:20:17.174434 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:17 crc kubenswrapper[4792]: I0929 19:20:17.643880 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bmpnh"] Sep 29 19:20:17 crc kubenswrapper[4792]: W0929 19:20:17.650501 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b2b8596_a247_4709_b5ad_a25b53886b52.slice/crio-70645ab9c02057af4279e9bb3dd2014fffecfc5e93bf11cfc870e15177de3e98 WatchSource:0}: Error finding container 70645ab9c02057af4279e9bb3dd2014fffecfc5e93bf11cfc870e15177de3e98: Status 404 returned error can't find the container with id 70645ab9c02057af4279e9bb3dd2014fffecfc5e93bf11cfc870e15177de3e98 Sep 29 19:20:18 crc kubenswrapper[4792]: I0929 19:20:18.440047 4792 generic.go:334] "Generic (PLEG): container finished" podID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerID="3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218" exitCode=0 Sep 29 19:20:18 crc kubenswrapper[4792]: I0929 19:20:18.440124 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s6z9t" event={"ID":"16c021a5-5e9d-4b49-8952-0ab84e2f2428","Type":"ContainerDied","Data":"3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218"} Sep 29 19:20:18 crc kubenswrapper[4792]: I0929 19:20:18.443519 4792 generic.go:334] "Generic (PLEG): container finished" podID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerID="b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8" exitCode=0 Sep 29 19:20:18 crc kubenswrapper[4792]: I0929 19:20:18.443561 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmpnh" event={"ID":"4b2b8596-a247-4709-b5ad-a25b53886b52","Type":"ContainerDied","Data":"b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8"} Sep 29 19:20:18 crc kubenswrapper[4792]: I0929 19:20:18.443589 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmpnh" event={"ID":"4b2b8596-a247-4709-b5ad-a25b53886b52","Type":"ContainerStarted","Data":"70645ab9c02057af4279e9bb3dd2014fffecfc5e93bf11cfc870e15177de3e98"} Sep 29 19:20:19 crc kubenswrapper[4792]: I0929 
Sep 29 19:20:19 crc kubenswrapper[4792]: I0929 19:20:19.459644 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s6z9t" event={"ID":"16c021a5-5e9d-4b49-8952-0ab84e2f2428","Type":"ContainerStarted","Data":"56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1"}
Sep 29 19:20:19 crc kubenswrapper[4792]: I0929 19:20:19.505467 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s6z9t" podStartSLOduration=3.036784614 podStartE2EDuration="6.505442733s" podCreationTimestamp="2025-09-29 19:20:13 +0000 UTC" firstStartedPulling="2025-09-29 19:20:15.410158015 +0000 UTC m=+1427.403465421" lastFinishedPulling="2025-09-29 19:20:18.878816144 +0000 UTC m=+1430.872123540" observedRunningTime="2025-09-29 19:20:19.500315248 +0000 UTC m=+1431.493622664" watchObservedRunningTime="2025-09-29 19:20:19.505442733 +0000 UTC m=+1431.498750129"
Sep 29 19:20:20 crc kubenswrapper[4792]: I0929 19:20:20.470288 4792 generic.go:334] "Generic (PLEG): container finished" podID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerID="8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea" exitCode=0
Sep 29 19:20:20 crc kubenswrapper[4792]: I0929 19:20:20.470325 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmpnh" event={"ID":"4b2b8596-a247-4709-b5ad-a25b53886b52","Type":"ContainerDied","Data":"8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea"}
Sep 29 19:20:21 crc kubenswrapper[4792]: I0929 19:20:21.485200 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmpnh" event={"ID":"4b2b8596-a247-4709-b5ad-a25b53886b52","Type":"ContainerStarted","Data":"5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16"}
Sep 29 19:20:21 crc kubenswrapper[4792]: I0929 19:20:21.508030 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bmpnh" podStartSLOduration=3.017662665 podStartE2EDuration="5.507990047s" podCreationTimestamp="2025-09-29 19:20:16 +0000 UTC" firstStartedPulling="2025-09-29 19:20:18.445945642 +0000 UTC m=+1430.439253038" lastFinishedPulling="2025-09-29 19:20:20.936273024 +0000 UTC m=+1432.929580420" observedRunningTime="2025-09-29 19:20:21.507109114 +0000 UTC m=+1433.500416530" watchObservedRunningTime="2025-09-29 19:20:21.507990047 +0000 UTC m=+1433.501297453"
Sep 29 19:20:22 crc kubenswrapper[4792]: I0929 19:20:22.890666 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67cb876dc9-v2rp9"
Sep 29 19:20:22 crc kubenswrapper[4792]: I0929 19:20:22.945035 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-skcsq"]
Sep 29 19:20:22 crc kubenswrapper[4792]: I0929 19:20:22.945317 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d558885bc-skcsq" podUID="5c4ccfb1-8403-4880-b6af-254c2a9f7dac" containerName="dnsmasq-dns" containerID="cri-o://4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a" gracePeriod=10
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.468608 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.503250 4792 generic.go:334] "Generic (PLEG): container finished" podID="5c4ccfb1-8403-4880-b6af-254c2a9f7dac" containerID="4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a" exitCode=0
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.503312 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-skcsq"
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.503360 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-skcsq" event={"ID":"5c4ccfb1-8403-4880-b6af-254c2a9f7dac","Type":"ContainerDied","Data":"4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a"}
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.503405 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-skcsq" event={"ID":"5c4ccfb1-8403-4880-b6af-254c2a9f7dac","Type":"ContainerDied","Data":"a9fdb47524028a5d4efe3a1369bf1e82f386727eb0c49aa4e6bbcf6f33d972c0"}
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.503448 4792 scope.go:117] "RemoveContainer" containerID="4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a"
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.527669 4792 scope.go:117] "RemoveContainer" containerID="851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c"
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.539403 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-sb\") pod \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") "
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.539460 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-nb\") pod \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") "
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.539493 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-svc\") pod \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") "
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.539525 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwzhb\" (UniqueName: \"kubernetes.io/projected/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-kube-api-access-vwzhb\") pod \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") "
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.539605 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-openstack-edpm-ipam\") pod \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") "
Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.539761 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-config\") pod \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") "
pod \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.539811 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-swift-storage-0\") pod \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\" (UID: \"5c4ccfb1-8403-4880-b6af-254c2a9f7dac\") " Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.557082 4792 scope.go:117] "RemoveContainer" containerID="4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.562077 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-kube-api-access-vwzhb" (OuterVolumeSpecName: "kube-api-access-vwzhb") pod "5c4ccfb1-8403-4880-b6af-254c2a9f7dac" (UID: "5c4ccfb1-8403-4880-b6af-254c2a9f7dac"). InnerVolumeSpecName "kube-api-access-vwzhb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:20:23 crc kubenswrapper[4792]: E0929 19:20:23.563289 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a\": container with ID starting with 4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a not found: ID does not exist" containerID="4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.563399 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a"} err="failed to get container status \"4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a\": rpc error: code = NotFound desc = could not find container \"4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a\": container with ID starting with 4089576a821e35a591900e7033cf50f445aa7a19b2841a0ac33bda890d0a020a not found: ID does not exist" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.563534 4792 scope.go:117] "RemoveContainer" containerID="851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c" Sep 29 19:20:23 crc kubenswrapper[4792]: E0929 19:20:23.564054 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c\": container with ID starting with 851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c not found: ID does not exist" containerID="851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.564103 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c"} err="failed to get container status \"851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c\": rpc error: code = NotFound desc = could not find container \"851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c\": container with ID starting with 851db6f8175062a8626b099efa3b0c0b7052130c4ddd9e8aa38ae974d4d82f9c not found: ID does not exist" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.612441 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-config" (OuterVolumeSpecName: "config") pod "5c4ccfb1-8403-4880-b6af-254c2a9f7dac" (UID: "5c4ccfb1-8403-4880-b6af-254c2a9f7dac"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.634715 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5c4ccfb1-8403-4880-b6af-254c2a9f7dac" (UID: "5c4ccfb1-8403-4880-b6af-254c2a9f7dac"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.639440 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5c4ccfb1-8403-4880-b6af-254c2a9f7dac" (UID: "5c4ccfb1-8403-4880-b6af-254c2a9f7dac"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.642459 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.642489 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.642501 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.642513 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwzhb\" (UniqueName: \"kubernetes.io/projected/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-kube-api-access-vwzhb\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.649819 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5c4ccfb1-8403-4880-b6af-254c2a9f7dac" (UID: "5c4ccfb1-8403-4880-b6af-254c2a9f7dac"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.660862 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "5c4ccfb1-8403-4880-b6af-254c2a9f7dac" (UID: "5c4ccfb1-8403-4880-b6af-254c2a9f7dac"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.664378 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5c4ccfb1-8403-4880-b6af-254c2a9f7dac" (UID: "5c4ccfb1-8403-4880-b6af-254c2a9f7dac"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.744146 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.744175 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.744186 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5c4ccfb1-8403-4880-b6af-254c2a9f7dac-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.833197 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-skcsq"] Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.848275 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-skcsq"] Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.948290 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:23 crc kubenswrapper[4792]: I0929 19:20:23.949532 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:25 crc kubenswrapper[4792]: I0929 19:20:25.026650 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c4ccfb1-8403-4880-b6af-254c2a9f7dac" path="/var/lib/kubelet/pods/5c4ccfb1-8403-4880-b6af-254c2a9f7dac/volumes" Sep 29 19:20:25 crc kubenswrapper[4792]: I0929 19:20:25.039475 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-s6z9t" podUID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerName="registry-server" probeResult="failure" output=< Sep 29 19:20:25 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Sep 29 19:20:25 crc kubenswrapper[4792]: > Sep 29 19:20:27 crc kubenswrapper[4792]: I0929 19:20:27.174970 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:27 crc kubenswrapper[4792]: I0929 19:20:27.175021 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:27 crc kubenswrapper[4792]: I0929 19:20:27.217906 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:27 crc kubenswrapper[4792]: I0929 19:20:27.580389 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:27 crc kubenswrapper[4792]: I0929 19:20:27.641724 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bmpnh"] Sep 29 19:20:29 crc kubenswrapper[4792]: I0929 19:20:29.560393 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bmpnh" podUID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerName="registry-server" containerID="cri-o://5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16" gracePeriod=2 Sep 29 19:20:30 crc 
kubenswrapper[4792]: I0929 19:20:30.030838 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.059299 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgm7z\" (UniqueName: \"kubernetes.io/projected/4b2b8596-a247-4709-b5ad-a25b53886b52-kube-api-access-sgm7z\") pod \"4b2b8596-a247-4709-b5ad-a25b53886b52\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.059356 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-catalog-content\") pod \"4b2b8596-a247-4709-b5ad-a25b53886b52\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.080089 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b2b8596-a247-4709-b5ad-a25b53886b52-kube-api-access-sgm7z" (OuterVolumeSpecName: "kube-api-access-sgm7z") pod "4b2b8596-a247-4709-b5ad-a25b53886b52" (UID: "4b2b8596-a247-4709-b5ad-a25b53886b52"). InnerVolumeSpecName "kube-api-access-sgm7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.085474 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b2b8596-a247-4709-b5ad-a25b53886b52" (UID: "4b2b8596-a247-4709-b5ad-a25b53886b52"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.161078 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-utilities\") pod \"4b2b8596-a247-4709-b5ad-a25b53886b52\" (UID: \"4b2b8596-a247-4709-b5ad-a25b53886b52\") " Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.161387 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgm7z\" (UniqueName: \"kubernetes.io/projected/4b2b8596-a247-4709-b5ad-a25b53886b52-kube-api-access-sgm7z\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.161402 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.161761 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-utilities" (OuterVolumeSpecName: "utilities") pod "4b2b8596-a247-4709-b5ad-a25b53886b52" (UID: "4b2b8596-a247-4709-b5ad-a25b53886b52"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.263166 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b2b8596-a247-4709-b5ad-a25b53886b52-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.570961 4792 generic.go:334] "Generic (PLEG): container finished" podID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerID="5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16" exitCode=0 Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.571015 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bmpnh" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.571971 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmpnh" event={"ID":"4b2b8596-a247-4709-b5ad-a25b53886b52","Type":"ContainerDied","Data":"5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16"} Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.572016 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmpnh" event={"ID":"4b2b8596-a247-4709-b5ad-a25b53886b52","Type":"ContainerDied","Data":"70645ab9c02057af4279e9bb3dd2014fffecfc5e93bf11cfc870e15177de3e98"} Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.572036 4792 scope.go:117] "RemoveContainer" containerID="5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.594349 4792 scope.go:117] "RemoveContainer" containerID="8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.609458 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bmpnh"] Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.651938 4792 scope.go:117] "RemoveContainer" containerID="b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.662760 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bmpnh"] Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.693093 4792 scope.go:117] "RemoveContainer" containerID="5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16" Sep 29 19:20:30 crc kubenswrapper[4792]: E0929 19:20:30.693554 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16\": container with ID starting with 5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16 not found: ID does not exist" containerID="5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.693603 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16"} err="failed to get container status \"5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16\": rpc error: code = NotFound desc = could not find container \"5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16\": container with ID starting with 5fac004e99c480594214353fe4cd7e2787817faa34693d7d6a3d93b567119e16 not found: ID does not exist" Sep 29 19:20:30 crc 
kubenswrapper[4792]: I0929 19:20:30.693631 4792 scope.go:117] "RemoveContainer" containerID="8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea" Sep 29 19:20:30 crc kubenswrapper[4792]: E0929 19:20:30.694029 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea\": container with ID starting with 8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea not found: ID does not exist" containerID="8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.694054 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea"} err="failed to get container status \"8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea\": rpc error: code = NotFound desc = could not find container \"8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea\": container with ID starting with 8da2ae1d6b3e14dfcce0bc419e2288917f251ec17d99ee533c63ecb1a37fafea not found: ID does not exist" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.694067 4792 scope.go:117] "RemoveContainer" containerID="b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8" Sep 29 19:20:30 crc kubenswrapper[4792]: E0929 19:20:30.694482 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8\": container with ID starting with b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8 not found: ID does not exist" containerID="b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8" Sep 29 19:20:30 crc kubenswrapper[4792]: I0929 19:20:30.694501 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8"} err="failed to get container status \"b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8\": rpc error: code = NotFound desc = could not find container \"b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8\": container with ID starting with b437bc6c6cc9557a3e70afe841289472f375bccf380e1ee8dc0613a6db741db8 not found: ID does not exist" Sep 29 19:20:31 crc kubenswrapper[4792]: I0929 19:20:31.028045 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b2b8596-a247-4709-b5ad-a25b53886b52" path="/var/lib/kubelet/pods/4b2b8596-a247-4709-b5ad-a25b53886b52/volumes" Sep 29 19:20:31 crc kubenswrapper[4792]: I0929 19:20:31.594404 4792 generic.go:334] "Generic (PLEG): container finished" podID="dd9e8433-9eac-49a2-bacd-7acb220b0efd" containerID="1020885a420ebf33decb4eb4ec9127536b519a0483ee76cc36d12548a02b28f4" exitCode=0 Sep 29 19:20:31 crc kubenswrapper[4792]: I0929 19:20:31.594494 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dd9e8433-9eac-49a2-bacd-7acb220b0efd","Type":"ContainerDied","Data":"1020885a420ebf33decb4eb4ec9127536b519a0483ee76cc36d12548a02b28f4"} Sep 29 19:20:32 crc kubenswrapper[4792]: I0929 19:20:32.609589 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"dd9e8433-9eac-49a2-bacd-7acb220b0efd","Type":"ContainerStarted","Data":"eab5c87d777296eb15a0ee505c47cca78689167686f7ab0efde43ac03ad81fa9"} Sep 29 19:20:32 crc kubenswrapper[4792]: I0929 19:20:32.610398 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 19:20:32 crc kubenswrapper[4792]: I0929 19:20:32.612271 4792 generic.go:334] "Generic (PLEG): container finished" podID="4e364c89-8b07-427c-a59a-c4576f98ddf2" containerID="5fdc2a1cf93c9ae4f28622fc359a167fc2b9ca22c5be2324ab1a28b364140a42" exitCode=0 Sep 29 19:20:32 crc kubenswrapper[4792]: I0929 19:20:32.612315 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4e364c89-8b07-427c-a59a-c4576f98ddf2","Type":"ContainerDied","Data":"5fdc2a1cf93c9ae4f28622fc359a167fc2b9ca22c5be2324ab1a28b364140a42"} Sep 29 19:20:32 crc kubenswrapper[4792]: I0929 19:20:32.662422 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=33.662400018 podStartE2EDuration="33.662400018s" podCreationTimestamp="2025-09-29 19:19:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:20:32.659180984 +0000 UTC m=+1444.652488400" watchObservedRunningTime="2025-09-29 19:20:32.662400018 +0000 UTC m=+1444.655707434" Sep 29 19:20:33 crc kubenswrapper[4792]: I0929 19:20:33.622729 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4e364c89-8b07-427c-a59a-c4576f98ddf2","Type":"ContainerStarted","Data":"82e8d1b57046a38c38a28bb295172efcd72288819317b60b9b228b2d73d4feb1"} Sep 29 19:20:33 crc kubenswrapper[4792]: I0929 19:20:33.623242 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:20:33 crc kubenswrapper[4792]: I0929 19:20:33.652432 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=33.652412561 podStartE2EDuration="33.652412561s" podCreationTimestamp="2025-09-29 19:20:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:20:33.644672719 +0000 UTC m=+1445.637980125" watchObservedRunningTime="2025-09-29 19:20:33.652412561 +0000 UTC m=+1445.645719957" Sep 29 19:20:33 crc kubenswrapper[4792]: I0929 19:20:33.994456 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:34 crc kubenswrapper[4792]: I0929 19:20:34.045958 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:34 crc kubenswrapper[4792]: I0929 19:20:34.235957 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s6z9t"] Sep 29 19:20:35 crc kubenswrapper[4792]: I0929 19:20:35.639759 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s6z9t" podUID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerName="registry-server" containerID="cri-o://56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1" gracePeriod=2 Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.235558 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.387149 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-catalog-content\") pod \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.387203 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-utilities\") pod \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.387344 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq9jm\" (UniqueName: \"kubernetes.io/projected/16c021a5-5e9d-4b49-8952-0ab84e2f2428-kube-api-access-dq9jm\") pod \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\" (UID: \"16c021a5-5e9d-4b49-8952-0ab84e2f2428\") " Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.388927 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-utilities" (OuterVolumeSpecName: "utilities") pod "16c021a5-5e9d-4b49-8952-0ab84e2f2428" (UID: "16c021a5-5e9d-4b49-8952-0ab84e2f2428"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.393395 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16c021a5-5e9d-4b49-8952-0ab84e2f2428-kube-api-access-dq9jm" (OuterVolumeSpecName: "kube-api-access-dq9jm") pod "16c021a5-5e9d-4b49-8952-0ab84e2f2428" (UID: "16c021a5-5e9d-4b49-8952-0ab84e2f2428"). InnerVolumeSpecName "kube-api-access-dq9jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.456738 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "16c021a5-5e9d-4b49-8952-0ab84e2f2428" (UID: "16c021a5-5e9d-4b49-8952-0ab84e2f2428"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.489085 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.489382 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16c021a5-5e9d-4b49-8952-0ab84e2f2428-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.489512 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq9jm\" (UniqueName: \"kubernetes.io/projected/16c021a5-5e9d-4b49-8952-0ab84e2f2428-kube-api-access-dq9jm\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.649189 4792 generic.go:334] "Generic (PLEG): container finished" podID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerID="56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1" exitCode=0 Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.649224 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s6z9t" event={"ID":"16c021a5-5e9d-4b49-8952-0ab84e2f2428","Type":"ContainerDied","Data":"56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1"} Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.649247 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s6z9t" event={"ID":"16c021a5-5e9d-4b49-8952-0ab84e2f2428","Type":"ContainerDied","Data":"44d5064759378d94a8c2773e10fdae81f73cf33ca2c010c2b407cb25658be4f1"} Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.649264 4792 scope.go:117] "RemoveContainer" containerID="56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.649370 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s6z9t" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.669548 4792 scope.go:117] "RemoveContainer" containerID="3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.682320 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s6z9t"] Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.691453 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s6z9t"] Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.694661 4792 scope.go:117] "RemoveContainer" containerID="37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.744103 4792 scope.go:117] "RemoveContainer" containerID="56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1" Sep 29 19:20:36 crc kubenswrapper[4792]: E0929 19:20:36.744657 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1\": container with ID starting with 56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1 not found: ID does not exist" containerID="56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.744693 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1"} err="failed to get container status \"56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1\": rpc error: code = NotFound desc = could not find container \"56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1\": container with ID starting with 56cc26b0e16d34705662311d92c1dd11d327c618fef895783e5bcf86c7c2acd1 not found: ID does not exist" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.744719 4792 scope.go:117] "RemoveContainer" containerID="3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218" Sep 29 19:20:36 crc kubenswrapper[4792]: E0929 19:20:36.745064 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218\": container with ID starting with 3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218 not found: ID does not exist" containerID="3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.745099 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218"} err="failed to get container status \"3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218\": rpc error: code = NotFound desc = could not find container \"3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218\": container with ID starting with 3c4a7cfa0a118a39c112b879b3ee42a8a363540714d36b8cd29232d433b94218 not found: ID does not exist" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.745117 4792 scope.go:117] "RemoveContainer" containerID="37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce" Sep 29 19:20:36 crc kubenswrapper[4792]: E0929 19:20:36.745530 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce\": container with ID starting with 37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce not found: ID does not exist" containerID="37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce" Sep 29 19:20:36 crc kubenswrapper[4792]: I0929 19:20:36.745555 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce"} err="failed to get container status \"37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce\": rpc error: code = NotFound desc = could not find container \"37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce\": container with ID starting with 37023a652f13b3ba704458d5c1d0a09deb15c02731d6f4ba48598d35bb2b3bce not found: ID does not exist" Sep 29 19:20:37 crc kubenswrapper[4792]: I0929 19:20:37.025880 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" path="/var/lib/kubelet/pods/16c021a5-5e9d-4b49-8952-0ab84e2f2428/volumes" Sep 29 19:20:41 crc kubenswrapper[4792]: I0929 19:20:41.960807 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:20:41 crc kubenswrapper[4792]: I0929 19:20:41.963376 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.855281 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c"] Sep 29 19:20:46 crc kubenswrapper[4792]: E0929 19:20:46.859037 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerName="registry-server" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.859061 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerName="registry-server" Sep 29 19:20:46 crc kubenswrapper[4792]: E0929 19:20:46.859080 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerName="registry-server" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.859089 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerName="registry-server" Sep 29 19:20:46 crc kubenswrapper[4792]: E0929 19:20:46.859111 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerName="extract-content" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.859119 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerName="extract-content" Sep 29 19:20:46 crc kubenswrapper[4792]: E0929 19:20:46.859132 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerName="extract-utilities" Sep 29 19:20:46 crc 
kubenswrapper[4792]: I0929 19:20:46.859140 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerName="extract-utilities" Sep 29 19:20:46 crc kubenswrapper[4792]: E0929 19:20:46.859150 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c4ccfb1-8403-4880-b6af-254c2a9f7dac" containerName="dnsmasq-dns" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.859156 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c4ccfb1-8403-4880-b6af-254c2a9f7dac" containerName="dnsmasq-dns" Sep 29 19:20:46 crc kubenswrapper[4792]: E0929 19:20:46.859178 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c4ccfb1-8403-4880-b6af-254c2a9f7dac" containerName="init" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.859187 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c4ccfb1-8403-4880-b6af-254c2a9f7dac" containerName="init" Sep 29 19:20:46 crc kubenswrapper[4792]: E0929 19:20:46.859202 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerName="extract-utilities" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.859609 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerName="extract-utilities" Sep 29 19:20:46 crc kubenswrapper[4792]: E0929 19:20:46.859627 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerName="extract-content" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.859635 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerName="extract-content" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.859870 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c4ccfb1-8403-4880-b6af-254c2a9f7dac" containerName="dnsmasq-dns" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.859897 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="16c021a5-5e9d-4b49-8952-0ab84e2f2428" containerName="registry-server" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.859917 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b2b8596-a247-4709-b5ad-a25b53886b52" containerName="registry-server" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.862162 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.864887 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.868950 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.869180 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.869181 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.876818 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c"] Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.986257 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s545p\" (UniqueName: \"kubernetes.io/projected/15519058-5c31-4b09-b9e8-68129ad2f41e-kube-api-access-s545p\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.986328 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.986487 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:46 crc kubenswrapper[4792]: I0929 19:20:46.986566 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:47 crc kubenswrapper[4792]: I0929 19:20:47.088601 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s545p\" (UniqueName: \"kubernetes.io/projected/15519058-5c31-4b09-b9e8-68129ad2f41e-kube-api-access-s545p\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:47 crc kubenswrapper[4792]: I0929 19:20:47.088707 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:47 crc kubenswrapper[4792]: I0929 19:20:47.088839 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:47 crc kubenswrapper[4792]: I0929 19:20:47.090180 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:47 crc kubenswrapper[4792]: I0929 19:20:47.094585 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:47 crc kubenswrapper[4792]: I0929 19:20:47.094827 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:47 crc kubenswrapper[4792]: I0929 19:20:47.096255 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:47 crc kubenswrapper[4792]: I0929 19:20:47.108656 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s545p\" (UniqueName: \"kubernetes.io/projected/15519058-5c31-4b09-b9e8-68129ad2f41e-kube-api-access-s545p\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:47 crc kubenswrapper[4792]: I0929 19:20:47.182523 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:20:47 crc kubenswrapper[4792]: I0929 19:20:47.958648 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c"] Sep 29 19:20:48 crc kubenswrapper[4792]: I0929 19:20:48.782365 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" event={"ID":"15519058-5c31-4b09-b9e8-68129ad2f41e","Type":"ContainerStarted","Data":"2a6a777f78281aeceb278b82102a2073cc9de37b645caf1d4da09a931af713c7"} Sep 29 19:20:49 crc kubenswrapper[4792]: I0929 19:20:49.729168 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 19:20:50 crc kubenswrapper[4792]: I0929 19:20:50.808035 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:21:01 crc kubenswrapper[4792]: I0929 19:21:01.908555 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" event={"ID":"15519058-5c31-4b09-b9e8-68129ad2f41e","Type":"ContainerStarted","Data":"ebd76432495d46a921727822e2fc8fe34a07df2072deaf513519b38e04aa8281"} Sep 29 19:21:01 crc kubenswrapper[4792]: I0929 19:21:01.941993 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" podStartSLOduration=2.354426687 podStartE2EDuration="15.941975755s" podCreationTimestamp="2025-09-29 19:20:46 +0000 UTC" firstStartedPulling="2025-09-29 19:20:47.963519124 +0000 UTC m=+1459.956826520" lastFinishedPulling="2025-09-29 19:21:01.551068192 +0000 UTC m=+1473.544375588" observedRunningTime="2025-09-29 19:21:01.940688392 +0000 UTC m=+1473.933995808" watchObservedRunningTime="2025-09-29 19:21:01.941975755 +0000 UTC m=+1473.935283171" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.085047 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ztjfj"] Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.090393 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.103853 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ztjfj"] Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.196157 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-catalog-content\") pod \"certified-operators-ztjfj\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.196415 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc9n9\" (UniqueName: \"kubernetes.io/projected/f9a97d1e-25ce-4b67-a962-a40636f31649-kube-api-access-gc9n9\") pod \"certified-operators-ztjfj\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.196523 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-utilities\") pod \"certified-operators-ztjfj\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.298350 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-catalog-content\") pod \"certified-operators-ztjfj\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.298443 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc9n9\" (UniqueName: \"kubernetes.io/projected/f9a97d1e-25ce-4b67-a962-a40636f31649-kube-api-access-gc9n9\") pod \"certified-operators-ztjfj\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.298468 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-utilities\") pod \"certified-operators-ztjfj\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.298906 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-utilities\") pod \"certified-operators-ztjfj\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.298923 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-catalog-content\") pod \"certified-operators-ztjfj\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.324820 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gc9n9\" (UniqueName: \"kubernetes.io/projected/f9a97d1e-25ce-4b67-a962-a40636f31649-kube-api-access-gc9n9\") pod \"certified-operators-ztjfj\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.422030 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:07 crc kubenswrapper[4792]: I0929 19:21:07.955057 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ztjfj"] Sep 29 19:21:08 crc kubenswrapper[4792]: I0929 19:21:08.973463 4792 generic.go:334] "Generic (PLEG): container finished" podID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerID="15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1" exitCode=0 Sep 29 19:21:08 crc kubenswrapper[4792]: I0929 19:21:08.973660 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztjfj" event={"ID":"f9a97d1e-25ce-4b67-a962-a40636f31649","Type":"ContainerDied","Data":"15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1"} Sep 29 19:21:08 crc kubenswrapper[4792]: I0929 19:21:08.973754 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztjfj" event={"ID":"f9a97d1e-25ce-4b67-a962-a40636f31649","Type":"ContainerStarted","Data":"abb7be2047690e58cf50006c63be4201ba43ae349a324a7da96c1c4b95e1c1d9"} Sep 29 19:21:09 crc kubenswrapper[4792]: I0929 19:21:09.985516 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztjfj" event={"ID":"f9a97d1e-25ce-4b67-a962-a40636f31649","Type":"ContainerStarted","Data":"92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e"} Sep 29 19:21:11 crc kubenswrapper[4792]: I0929 19:21:11.959797 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:21:11 crc kubenswrapper[4792]: I0929 19:21:11.960152 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:21:11 crc kubenswrapper[4792]: I0929 19:21:11.960199 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:21:11 crc kubenswrapper[4792]: I0929 19:21:11.961044 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4da2f3e8ceb0a8e1e559272a172c2d5b11ff46e91e3ba55c40264756f850c284"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:21:11 crc kubenswrapper[4792]: I0929 19:21:11.961102 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" 
containerName="machine-config-daemon" containerID="cri-o://4da2f3e8ceb0a8e1e559272a172c2d5b11ff46e91e3ba55c40264756f850c284" gracePeriod=600 Sep 29 19:21:12 crc kubenswrapper[4792]: I0929 19:21:12.003418 4792 generic.go:334] "Generic (PLEG): container finished" podID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerID="92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e" exitCode=0 Sep 29 19:21:12 crc kubenswrapper[4792]: I0929 19:21:12.003458 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztjfj" event={"ID":"f9a97d1e-25ce-4b67-a962-a40636f31649","Type":"ContainerDied","Data":"92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e"} Sep 29 19:21:13 crc kubenswrapper[4792]: I0929 19:21:13.017880 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="4da2f3e8ceb0a8e1e559272a172c2d5b11ff46e91e3ba55c40264756f850c284" exitCode=0 Sep 29 19:21:13 crc kubenswrapper[4792]: I0929 19:21:13.025505 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztjfj" event={"ID":"f9a97d1e-25ce-4b67-a962-a40636f31649","Type":"ContainerStarted","Data":"cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4"} Sep 29 19:21:13 crc kubenswrapper[4792]: I0929 19:21:13.025536 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"4da2f3e8ceb0a8e1e559272a172c2d5b11ff46e91e3ba55c40264756f850c284"} Sep 29 19:21:13 crc kubenswrapper[4792]: I0929 19:21:13.025549 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7"} Sep 29 19:21:13 crc kubenswrapper[4792]: I0929 19:21:13.025565 4792 scope.go:117] "RemoveContainer" containerID="e8bc360625c05ed5b39b0bdabe37934fb480a91515b533db0262f5a58fa6cf95" Sep 29 19:21:13 crc kubenswrapper[4792]: I0929 19:21:13.087023 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ztjfj" podStartSLOduration=2.581022267 podStartE2EDuration="6.087004612s" podCreationTimestamp="2025-09-29 19:21:07 +0000 UTC" firstStartedPulling="2025-09-29 19:21:08.975781747 +0000 UTC m=+1480.969089143" lastFinishedPulling="2025-09-29 19:21:12.481764092 +0000 UTC m=+1484.475071488" observedRunningTime="2025-09-29 19:21:13.073851788 +0000 UTC m=+1485.067159194" watchObservedRunningTime="2025-09-29 19:21:13.087004612 +0000 UTC m=+1485.080312008" Sep 29 19:21:15 crc kubenswrapper[4792]: I0929 19:21:15.039604 4792 generic.go:334] "Generic (PLEG): container finished" podID="15519058-5c31-4b09-b9e8-68129ad2f41e" containerID="ebd76432495d46a921727822e2fc8fe34a07df2072deaf513519b38e04aa8281" exitCode=0 Sep 29 19:21:15 crc kubenswrapper[4792]: I0929 19:21:15.039712 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" event={"ID":"15519058-5c31-4b09-b9e8-68129ad2f41e","Type":"ContainerDied","Data":"ebd76432495d46a921727822e2fc8fe34a07df2072deaf513519b38e04aa8281"} Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.491789 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.572407 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s545p\" (UniqueName: \"kubernetes.io/projected/15519058-5c31-4b09-b9e8-68129ad2f41e-kube-api-access-s545p\") pod \"15519058-5c31-4b09-b9e8-68129ad2f41e\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.572453 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-ssh-key\") pod \"15519058-5c31-4b09-b9e8-68129ad2f41e\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.572514 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-repo-setup-combined-ca-bundle\") pod \"15519058-5c31-4b09-b9e8-68129ad2f41e\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.573449 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-inventory\") pod \"15519058-5c31-4b09-b9e8-68129ad2f41e\" (UID: \"15519058-5c31-4b09-b9e8-68129ad2f41e\") " Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.581406 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "15519058-5c31-4b09-b9e8-68129ad2f41e" (UID: "15519058-5c31-4b09-b9e8-68129ad2f41e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.582358 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15519058-5c31-4b09-b9e8-68129ad2f41e-kube-api-access-s545p" (OuterVolumeSpecName: "kube-api-access-s545p") pod "15519058-5c31-4b09-b9e8-68129ad2f41e" (UID: "15519058-5c31-4b09-b9e8-68129ad2f41e"). InnerVolumeSpecName "kube-api-access-s545p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.610481 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-inventory" (OuterVolumeSpecName: "inventory") pod "15519058-5c31-4b09-b9e8-68129ad2f41e" (UID: "15519058-5c31-4b09-b9e8-68129ad2f41e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.611813 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "15519058-5c31-4b09-b9e8-68129ad2f41e" (UID: "15519058-5c31-4b09-b9e8-68129ad2f41e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.675453 4792 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.675497 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.675511 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s545p\" (UniqueName: \"kubernetes.io/projected/15519058-5c31-4b09-b9e8-68129ad2f41e-kube-api-access-s545p\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:16 crc kubenswrapper[4792]: I0929 19:21:16.675524 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15519058-5c31-4b09-b9e8-68129ad2f41e-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.063098 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" event={"ID":"15519058-5c31-4b09-b9e8-68129ad2f41e","Type":"ContainerDied","Data":"2a6a777f78281aeceb278b82102a2073cc9de37b645caf1d4da09a931af713c7"} Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.063133 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a6a777f78281aeceb278b82102a2073cc9de37b645caf1d4da09a931af713c7" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.063190 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.138625 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4"] Sep 29 19:21:17 crc kubenswrapper[4792]: E0929 19:21:17.139088 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15519058-5c31-4b09-b9e8-68129ad2f41e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.139113 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="15519058-5c31-4b09-b9e8-68129ad2f41e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.139422 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="15519058-5c31-4b09-b9e8-68129ad2f41e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.140033 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.143583 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.144123 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.144366 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.146093 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.165179 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4"] Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.285371 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cgz8\" (UniqueName: \"kubernetes.io/projected/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-kube-api-access-7cgz8\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-867d4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.285779 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-867d4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.285870 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-867d4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.387589 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-867d4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.388713 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-867d4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.388942 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cgz8\" (UniqueName: \"kubernetes.io/projected/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-kube-api-access-7cgz8\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-867d4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.393608 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-867d4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.393807 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-867d4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.410329 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cgz8\" (UniqueName: \"kubernetes.io/projected/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-kube-api-access-7cgz8\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-867d4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.423027 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.424246 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:17 crc kubenswrapper[4792]: I0929 19:21:17.465324 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:18 crc kubenswrapper[4792]: I0929 19:21:18.200738 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4"] Sep 29 19:21:18 crc kubenswrapper[4792]: I0929 19:21:18.496291 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-ztjfj" podUID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerName="registry-server" probeResult="failure" output=< Sep 29 19:21:18 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Sep 29 19:21:18 crc kubenswrapper[4792]: > Sep 29 19:21:19 crc kubenswrapper[4792]: I0929 19:21:19.096401 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" event={"ID":"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4","Type":"ContainerStarted","Data":"47ed56f50cf76084665d73a5f00079a48de4812176ceb51326489526f7be2723"} Sep 29 19:21:19 crc kubenswrapper[4792]: I0929 19:21:19.097035 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" event={"ID":"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4","Type":"ContainerStarted","Data":"f7a4d9bd93688c6809ef9cc4c648d309a9675cc58778a2ca68b6923c23b81044"} Sep 29 19:21:19 crc kubenswrapper[4792]: I0929 19:21:19.132391 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" podStartSLOduration=1.9662416569999999 podStartE2EDuration="2.132367722s" podCreationTimestamp="2025-09-29 19:21:17 +0000 UTC" firstStartedPulling="2025-09-29 19:21:18.211889007 +0000 UTC m=+1490.205196403" lastFinishedPulling="2025-09-29 19:21:18.378015072 +0000 UTC m=+1490.371322468" observedRunningTime="2025-09-29 19:21:19.120163952 +0000 UTC m=+1491.113471368" watchObservedRunningTime="2025-09-29 19:21:19.132367722 +0000 UTC m=+1491.125675118" Sep 29 19:21:22 crc kubenswrapper[4792]: I0929 19:21:22.128508 4792 generic.go:334] "Generic (PLEG): container finished" podID="a44c4b7c-994b-4f5f-8b00-ca9da0a744f4" containerID="47ed56f50cf76084665d73a5f00079a48de4812176ceb51326489526f7be2723" exitCode=0 Sep 29 19:21:22 crc kubenswrapper[4792]: I0929 19:21:22.128597 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" event={"ID":"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4","Type":"ContainerDied","Data":"47ed56f50cf76084665d73a5f00079a48de4812176ceb51326489526f7be2723"} Sep 29 19:21:23 crc kubenswrapper[4792]: I0929 19:21:23.581168 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:23 crc kubenswrapper[4792]: I0929 19:21:23.731749 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cgz8\" (UniqueName: \"kubernetes.io/projected/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-kube-api-access-7cgz8\") pod \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " Sep 29 19:21:23 crc kubenswrapper[4792]: I0929 19:21:23.731925 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-ssh-key\") pod \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " Sep 29 19:21:23 crc kubenswrapper[4792]: I0929 19:21:23.732093 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-inventory\") pod \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\" (UID: \"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4\") " Sep 29 19:21:23 crc kubenswrapper[4792]: I0929 19:21:23.738128 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-kube-api-access-7cgz8" (OuterVolumeSpecName: "kube-api-access-7cgz8") pod "a44c4b7c-994b-4f5f-8b00-ca9da0a744f4" (UID: "a44c4b7c-994b-4f5f-8b00-ca9da0a744f4"). InnerVolumeSpecName "kube-api-access-7cgz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:21:23 crc kubenswrapper[4792]: I0929 19:21:23.764474 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-inventory" (OuterVolumeSpecName: "inventory") pod "a44c4b7c-994b-4f5f-8b00-ca9da0a744f4" (UID: "a44c4b7c-994b-4f5f-8b00-ca9da0a744f4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:21:23 crc kubenswrapper[4792]: I0929 19:21:23.768295 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a44c4b7c-994b-4f5f-8b00-ca9da0a744f4" (UID: "a44c4b7c-994b-4f5f-8b00-ca9da0a744f4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:21:23 crc kubenswrapper[4792]: I0929 19:21:23.834496 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:23 crc kubenswrapper[4792]: I0929 19:21:23.834528 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cgz8\" (UniqueName: \"kubernetes.io/projected/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-kube-api-access-7cgz8\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:23 crc kubenswrapper[4792]: I0929 19:21:23.834539 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a44c4b7c-994b-4f5f-8b00-ca9da0a744f4-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.150860 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" event={"ID":"a44c4b7c-994b-4f5f-8b00-ca9da0a744f4","Type":"ContainerDied","Data":"f7a4d9bd93688c6809ef9cc4c648d309a9675cc58778a2ca68b6923c23b81044"} Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.150896 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7a4d9bd93688c6809ef9cc4c648d309a9675cc58778a2ca68b6923c23b81044" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.150915 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-867d4" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.214484 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn"] Sep 29 19:21:24 crc kubenswrapper[4792]: E0929 19:21:24.214961 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a44c4b7c-994b-4f5f-8b00-ca9da0a744f4" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.214987 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a44c4b7c-994b-4f5f-8b00-ca9da0a744f4" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.215256 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a44c4b7c-994b-4f5f-8b00-ca9da0a744f4" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.216253 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.222459 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.222663 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.222783 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.223039 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.228731 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn"] Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.343804 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpbw2\" (UniqueName: \"kubernetes.io/projected/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-kube-api-access-xpbw2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.343887 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.343945 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.344109 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.446846 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpbw2\" (UniqueName: \"kubernetes.io/projected/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-kube-api-access-xpbw2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.447007 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-bootstrap-combined-ca-bundle\") 
pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.447115 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.447731 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.451526 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.452103 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.458282 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.466646 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpbw2\" (UniqueName: \"kubernetes.io/projected/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-kube-api-access-xpbw2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:24 crc kubenswrapper[4792]: I0929 19:21:24.540035 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:21:25 crc kubenswrapper[4792]: I0929 19:21:25.089790 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn"] Sep 29 19:21:25 crc kubenswrapper[4792]: I0929 19:21:25.161495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" event={"ID":"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6","Type":"ContainerStarted","Data":"df0ea212ecda2872e909b30174edea4ff517c1b1fc93cebf2f178d8c61d983ee"} Sep 29 19:21:26 crc kubenswrapper[4792]: I0929 19:21:26.171791 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" event={"ID":"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6","Type":"ContainerStarted","Data":"e89f3e72703ecd76d08408d5d7ac60a91ff2ceb362a7d965a99d540077a017c5"} Sep 29 19:21:26 crc kubenswrapper[4792]: I0929 19:21:26.195918 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" podStartSLOduration=2.031955133 podStartE2EDuration="2.195895711s" podCreationTimestamp="2025-09-29 19:21:24 +0000 UTC" firstStartedPulling="2025-09-29 19:21:25.079551034 +0000 UTC m=+1497.072858430" lastFinishedPulling="2025-09-29 19:21:25.243491602 +0000 UTC m=+1497.236799008" observedRunningTime="2025-09-29 19:21:26.189814082 +0000 UTC m=+1498.183121478" watchObservedRunningTime="2025-09-29 19:21:26.195895711 +0000 UTC m=+1498.189203107" Sep 29 19:21:27 crc kubenswrapper[4792]: I0929 19:21:27.470738 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:27 crc kubenswrapper[4792]: I0929 19:21:27.525103 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:27 crc kubenswrapper[4792]: I0929 19:21:27.710704 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ztjfj"] Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.196803 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ztjfj" podUID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerName="registry-server" containerID="cri-o://cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4" gracePeriod=2 Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.634097 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.754369 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-catalog-content\") pod \"f9a97d1e-25ce-4b67-a962-a40636f31649\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.754404 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-utilities\") pod \"f9a97d1e-25ce-4b67-a962-a40636f31649\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.754569 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc9n9\" (UniqueName: \"kubernetes.io/projected/f9a97d1e-25ce-4b67-a962-a40636f31649-kube-api-access-gc9n9\") pod \"f9a97d1e-25ce-4b67-a962-a40636f31649\" (UID: \"f9a97d1e-25ce-4b67-a962-a40636f31649\") " Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.755873 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-utilities" (OuterVolumeSpecName: "utilities") pod "f9a97d1e-25ce-4b67-a962-a40636f31649" (UID: "f9a97d1e-25ce-4b67-a962-a40636f31649"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.761160 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a97d1e-25ce-4b67-a962-a40636f31649-kube-api-access-gc9n9" (OuterVolumeSpecName: "kube-api-access-gc9n9") pod "f9a97d1e-25ce-4b67-a962-a40636f31649" (UID: "f9a97d1e-25ce-4b67-a962-a40636f31649"). InnerVolumeSpecName "kube-api-access-gc9n9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.829096 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9a97d1e-25ce-4b67-a962-a40636f31649" (UID: "f9a97d1e-25ce-4b67-a962-a40636f31649"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.857223 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.857260 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a97d1e-25ce-4b67-a962-a40636f31649-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:29 crc kubenswrapper[4792]: I0929 19:21:29.857273 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc9n9\" (UniqueName: \"kubernetes.io/projected/f9a97d1e-25ce-4b67-a962-a40636f31649-kube-api-access-gc9n9\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.219952 4792 generic.go:334] "Generic (PLEG): container finished" podID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerID="cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4" exitCode=0 Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.220007 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ztjfj" Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.220025 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztjfj" event={"ID":"f9a97d1e-25ce-4b67-a962-a40636f31649","Type":"ContainerDied","Data":"cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4"} Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.220414 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztjfj" event={"ID":"f9a97d1e-25ce-4b67-a962-a40636f31649","Type":"ContainerDied","Data":"abb7be2047690e58cf50006c63be4201ba43ae349a324a7da96c1c4b95e1c1d9"} Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.220435 4792 scope.go:117] "RemoveContainer" containerID="cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4" Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.253182 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ztjfj"] Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.254518 4792 scope.go:117] "RemoveContainer" containerID="92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e" Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.261488 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ztjfj"] Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.274520 4792 scope.go:117] "RemoveContainer" containerID="15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1" Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.317947 4792 scope.go:117] "RemoveContainer" containerID="cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4" Sep 29 19:21:30 crc kubenswrapper[4792]: E0929 19:21:30.318451 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4\": container with ID starting with cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4 not found: ID does not exist" containerID="cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4" Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.318490 
4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4"} err="failed to get container status \"cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4\": rpc error: code = NotFound desc = could not find container \"cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4\": container with ID starting with cade52593c162191bcec6c7ac4e1a3cefd105b93ec2084a75c84094563ed5fd4 not found: ID does not exist" Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.318512 4792 scope.go:117] "RemoveContainer" containerID="92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e" Sep 29 19:21:30 crc kubenswrapper[4792]: E0929 19:21:30.318966 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e\": container with ID starting with 92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e not found: ID does not exist" containerID="92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e" Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.319021 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e"} err="failed to get container status \"92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e\": rpc error: code = NotFound desc = could not find container \"92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e\": container with ID starting with 92742c8687ddb36356593b821031f26ef49b982f5970913a041330e25606ce9e not found: ID does not exist" Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.319058 4792 scope.go:117] "RemoveContainer" containerID="15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1" Sep 29 19:21:30 crc kubenswrapper[4792]: E0929 19:21:30.319367 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1\": container with ID starting with 15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1 not found: ID does not exist" containerID="15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1" Sep 29 19:21:30 crc kubenswrapper[4792]: I0929 19:21:30.319394 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1"} err="failed to get container status \"15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1\": rpc error: code = NotFound desc = could not find container \"15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1\": container with ID starting with 15ddfe65af618eb344aefb16902026061d3291584a4cfd6f00a0422ab36072f1 not found: ID does not exist" Sep 29 19:21:31 crc kubenswrapper[4792]: I0929 19:21:31.026017 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a97d1e-25ce-4b67-a962-a40636f31649" path="/var/lib/kubelet/pods/f9a97d1e-25ce-4b67-a962-a40636f31649/volumes" Sep 29 19:21:35 crc kubenswrapper[4792]: I0929 19:21:35.518014 4792 scope.go:117] "RemoveContainer" containerID="d72f423cf06a89a52dbdb61789a4ada9f3ad5c2e26307d79817fdcd764d326aa" Sep 29 19:21:35 crc kubenswrapper[4792]: I0929 19:21:35.553298 4792 scope.go:117] "RemoveContainer" 
containerID="60de474f64d8ceab7bd04a696c276c4daae767384d55d5faf43ab7a04cb6e95a" Sep 29 19:23:41 crc kubenswrapper[4792]: I0929 19:23:41.960358 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:23:41 crc kubenswrapper[4792]: I0929 19:23:41.961089 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:24:11 crc kubenswrapper[4792]: I0929 19:24:11.959498 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:24:11 crc kubenswrapper[4792]: I0929 19:24:11.960000 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:24:33 crc kubenswrapper[4792]: I0929 19:24:33.048938 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-bvxvv"] Sep 29 19:24:33 crc kubenswrapper[4792]: I0929 19:24:33.056638 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-bhpdb"] Sep 29 19:24:33 crc kubenswrapper[4792]: I0929 19:24:33.068121 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-bvxvv"] Sep 29 19:24:33 crc kubenswrapper[4792]: I0929 19:24:33.084697 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-bhpdb"] Sep 29 19:24:35 crc kubenswrapper[4792]: I0929 19:24:35.030470 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80431414-82a8-41e6-b6d4-d9e23639c6a3" path="/var/lib/kubelet/pods/80431414-82a8-41e6-b6d4-d9e23639c6a3/volumes" Sep 29 19:24:35 crc kubenswrapper[4792]: I0929 19:24:35.031159 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="903007b3-34eb-437b-adf1-ec511ad037ad" path="/var/lib/kubelet/pods/903007b3-34eb-437b-adf1-ec511ad037ad/volumes" Sep 29 19:24:35 crc kubenswrapper[4792]: I0929 19:24:35.755533 4792 scope.go:117] "RemoveContainer" containerID="b7dd0ce8ce59435410436b2e79bfbdcdeded61430c1bb76f80c2feaf8a3be826" Sep 29 19:24:35 crc kubenswrapper[4792]: I0929 19:24:35.783019 4792 scope.go:117] "RemoveContainer" containerID="8db4ea34f4e7b8929509c6d3a65341ac25ecfe70677a497947e4a3ab6e35f7fc" Sep 29 19:24:35 crc kubenswrapper[4792]: I0929 19:24:35.807813 4792 scope.go:117] "RemoveContainer" containerID="1081e48c669d84e83b06df9d9d775cc07869a324aa1810d929d32bac72816f3f" Sep 29 19:24:35 crc kubenswrapper[4792]: I0929 19:24:35.835316 4792 scope.go:117] "RemoveContainer" containerID="3d81bcab52a676c2e3edc508229d88c51f2289895c25906ee61ca55a6e620310" Sep 29 19:24:35 crc kubenswrapper[4792]: I0929 19:24:35.859666 4792 scope.go:117] "RemoveContainer" 
containerID="1e0d58a3d084433691acd1399be7988139b2165da593e3708db3c700482262d0" Sep 29 19:24:38 crc kubenswrapper[4792]: I0929 19:24:38.004196 4792 generic.go:334] "Generic (PLEG): container finished" podID="3dbdb326-a5bc-4d53-b4cc-6971b8a715e6" containerID="e89f3e72703ecd76d08408d5d7ac60a91ff2ceb362a7d965a99d540077a017c5" exitCode=0 Sep 29 19:24:38 crc kubenswrapper[4792]: I0929 19:24:38.004294 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" event={"ID":"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6","Type":"ContainerDied","Data":"e89f3e72703ecd76d08408d5d7ac60a91ff2ceb362a7d965a99d540077a017c5"} Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.416617 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.508249 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-ssh-key\") pod \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.508488 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpbw2\" (UniqueName: \"kubernetes.io/projected/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-kube-api-access-xpbw2\") pod \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.508505 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-inventory\") pod \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.508605 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-bootstrap-combined-ca-bundle\") pod \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\" (UID: \"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6\") " Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.513714 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3dbdb326-a5bc-4d53-b4cc-6971b8a715e6" (UID: "3dbdb326-a5bc-4d53-b4cc-6971b8a715e6"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.515864 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-kube-api-access-xpbw2" (OuterVolumeSpecName: "kube-api-access-xpbw2") pod "3dbdb326-a5bc-4d53-b4cc-6971b8a715e6" (UID: "3dbdb326-a5bc-4d53-b4cc-6971b8a715e6"). InnerVolumeSpecName "kube-api-access-xpbw2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.536451 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3dbdb326-a5bc-4d53-b4cc-6971b8a715e6" (UID: "3dbdb326-a5bc-4d53-b4cc-6971b8a715e6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.543005 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-inventory" (OuterVolumeSpecName: "inventory") pod "3dbdb326-a5bc-4d53-b4cc-6971b8a715e6" (UID: "3dbdb326-a5bc-4d53-b4cc-6971b8a715e6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.611058 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.611093 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpbw2\" (UniqueName: \"kubernetes.io/projected/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-kube-api-access-xpbw2\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.611105 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:39 crc kubenswrapper[4792]: I0929 19:24:39.611114 4792 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dbdb326-a5bc-4d53-b4cc-6971b8a715e6-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.024607 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" event={"ID":"3dbdb326-a5bc-4d53-b4cc-6971b8a715e6","Type":"ContainerDied","Data":"df0ea212ecda2872e909b30174edea4ff517c1b1fc93cebf2f178d8c61d983ee"} Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.024648 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df0ea212ecda2872e909b30174edea4ff517c1b1fc93cebf2f178d8c61d983ee" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.024665 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.108881 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj"] Sep 29 19:24:40 crc kubenswrapper[4792]: E0929 19:24:40.109274 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dbdb326-a5bc-4d53-b4cc-6971b8a715e6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.109290 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dbdb326-a5bc-4d53-b4cc-6971b8a715e6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 19:24:40 crc kubenswrapper[4792]: E0929 19:24:40.109310 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerName="extract-utilities" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.109317 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerName="extract-utilities" Sep 29 19:24:40 crc kubenswrapper[4792]: E0929 19:24:40.109330 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerName="registry-server" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.109336 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerName="registry-server" Sep 29 19:24:40 crc kubenswrapper[4792]: E0929 19:24:40.109361 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerName="extract-content" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.109367 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerName="extract-content" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.109549 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dbdb326-a5bc-4d53-b4cc-6971b8a715e6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.109575 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a97d1e-25ce-4b67-a962-a40636f31649" containerName="registry-server" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.110175 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.112015 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.112091 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.116186 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.116365 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.119934 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj"] Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.221251 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdb87\" (UniqueName: \"kubernetes.io/projected/c90a0d9d-bf42-4d49-9527-e859ffce83a0-kube-api-access-tdb87\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.221302 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.221545 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.323153 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdb87\" (UniqueName: \"kubernetes.io/projected/c90a0d9d-bf42-4d49-9527-e859ffce83a0-kube-api-access-tdb87\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.323200 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.323266 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-ssh-key\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.326582 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.328552 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.341373 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdb87\" (UniqueName: \"kubernetes.io/projected/c90a0d9d-bf42-4d49-9527-e859ffce83a0-kube-api-access-tdb87\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.428064 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.898742 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj"] Sep 29 19:24:40 crc kubenswrapper[4792]: W0929 19:24:40.907156 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc90a0d9d_bf42_4d49_9527_e859ffce83a0.slice/crio-643c706791bdd8f0f9443b8ada0d6fc8936fe411ab48afe71305ecbf1a2909ce WatchSource:0}: Error finding container 643c706791bdd8f0f9443b8ada0d6fc8936fe411ab48afe71305ecbf1a2909ce: Status 404 returned error can't find the container with id 643c706791bdd8f0f9443b8ada0d6fc8936fe411ab48afe71305ecbf1a2909ce Sep 29 19:24:40 crc kubenswrapper[4792]: I0929 19:24:40.909998 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:24:41 crc kubenswrapper[4792]: I0929 19:24:41.033376 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" event={"ID":"c90a0d9d-bf42-4d49-9527-e859ffce83a0","Type":"ContainerStarted","Data":"643c706791bdd8f0f9443b8ada0d6fc8936fe411ab48afe71305ecbf1a2909ce"} Sep 29 19:24:41 crc kubenswrapper[4792]: I0929 19:24:41.960154 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:24:41 crc kubenswrapper[4792]: I0929 19:24:41.960529 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:24:41 crc kubenswrapper[4792]: I0929 19:24:41.960580 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:24:41 crc kubenswrapper[4792]: I0929 19:24:41.961544 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:24:41 crc kubenswrapper[4792]: I0929 19:24:41.961608 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" gracePeriod=600 Sep 29 19:24:42 crc kubenswrapper[4792]: I0929 19:24:42.044995 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" event={"ID":"c90a0d9d-bf42-4d49-9527-e859ffce83a0","Type":"ContainerStarted","Data":"46b93ef4c13808e6ba854fa43e617cf41fc9a1e4ddd747106d559ad049243ab5"} Sep 29 19:24:42 crc kubenswrapper[4792]: I0929 19:24:42.063055 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" podStartSLOduration=1.9136139509999999 podStartE2EDuration="2.063032887s" podCreationTimestamp="2025-09-29 19:24:40 +0000 UTC" firstStartedPulling="2025-09-29 19:24:40.909765822 +0000 UTC m=+1692.903073218" lastFinishedPulling="2025-09-29 19:24:41.059184758 +0000 UTC m=+1693.052492154" observedRunningTime="2025-09-29 19:24:42.060052949 +0000 UTC m=+1694.053360365" watchObservedRunningTime="2025-09-29 19:24:42.063032887 +0000 UTC m=+1694.056340283" Sep 29 19:24:42 crc kubenswrapper[4792]: E0929 19:24:42.104821 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:24:43 crc kubenswrapper[4792]: I0929 19:24:43.057344 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" exitCode=0 Sep 29 19:24:43 crc kubenswrapper[4792]: I0929 19:24:43.057430 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7"} Sep 29 19:24:43 crc kubenswrapper[4792]: I0929 19:24:43.057490 4792 scope.go:117] "RemoveContainer" containerID="4da2f3e8ceb0a8e1e559272a172c2d5b11ff46e91e3ba55c40264756f850c284" Sep 29 19:24:43 crc kubenswrapper[4792]: I0929 19:24:43.058515 4792 scope.go:117] 
"RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:24:43 crc kubenswrapper[4792]: E0929 19:24:43.059002 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:24:44 crc kubenswrapper[4792]: I0929 19:24:44.029680 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-5qvbx"] Sep 29 19:24:44 crc kubenswrapper[4792]: I0929 19:24:44.036237 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-4622-account-create-kqskg"] Sep 29 19:24:44 crc kubenswrapper[4792]: I0929 19:24:44.044389 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-019a-account-create-22pm5"] Sep 29 19:24:44 crc kubenswrapper[4792]: I0929 19:24:44.052730 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-5qvbx"] Sep 29 19:24:44 crc kubenswrapper[4792]: I0929 19:24:44.062697 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-4622-account-create-kqskg"] Sep 29 19:24:44 crc kubenswrapper[4792]: I0929 19:24:44.070302 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-019a-account-create-22pm5"] Sep 29 19:24:45 crc kubenswrapper[4792]: I0929 19:24:45.029562 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42ef9ed2-647c-4dd1-aca8-625f68ad7a15" path="/var/lib/kubelet/pods/42ef9ed2-647c-4dd1-aca8-625f68ad7a15/volumes" Sep 29 19:24:45 crc kubenswrapper[4792]: I0929 19:24:45.030631 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb43922b-132e-4ce3-8004-d0fddc9e7c80" path="/var/lib/kubelet/pods/eb43922b-132e-4ce3-8004-d0fddc9e7c80/volumes" Sep 29 19:24:45 crc kubenswrapper[4792]: I0929 19:24:45.034160 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f309741e-5733-4b16-ba3b-f354fca03459" path="/var/lib/kubelet/pods/f309741e-5733-4b16-ba3b-f354fca03459/volumes" Sep 29 19:24:54 crc kubenswrapper[4792]: I0929 19:24:54.015166 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:24:54 crc kubenswrapper[4792]: E0929 19:24:54.015963 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:25:04 crc kubenswrapper[4792]: I0929 19:25:04.053947 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-860c-account-create-rqnbc"] Sep 29 19:25:04 crc kubenswrapper[4792]: I0929 19:25:04.068482 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-lzctq"] Sep 29 19:25:04 crc kubenswrapper[4792]: I0929 19:25:04.078210 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-lvkq7"] Sep 29 19:25:04 crc kubenswrapper[4792]: I0929 
Sep 29 19:25:04 crc kubenswrapper[4792]: I0929 19:25:04.092000 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-lvkq7"]
Sep 29 19:25:04 crc kubenswrapper[4792]: I0929 19:25:04.099707 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-860c-account-create-rqnbc"]
Sep 29 19:25:04 crc kubenswrapper[4792]: I0929 19:25:04.108547 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-xpptn"]
Sep 29 19:25:04 crc kubenswrapper[4792]: I0929 19:25:04.115924 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-lzctq"]
Sep 29 19:25:05 crc kubenswrapper[4792]: I0929 19:25:05.025272 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11ca06bb-7266-4314-b094-0b34f5531fda" path="/var/lib/kubelet/pods/11ca06bb-7266-4314-b094-0b34f5531fda/volumes"
Sep 29 19:25:05 crc kubenswrapper[4792]: I0929 19:25:05.028558 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2711541c-2b88-4ca2-9519-5ca1e8ebb8a6" path="/var/lib/kubelet/pods/2711541c-2b88-4ca2-9519-5ca1e8ebb8a6/volumes"
Sep 29 19:25:05 crc kubenswrapper[4792]: I0929 19:25:05.030672 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acba234d-48c9-44b8-99ed-ce58f33c8dbd" path="/var/lib/kubelet/pods/acba234d-48c9-44b8-99ed-ce58f33c8dbd/volumes"
Sep 29 19:25:05 crc kubenswrapper[4792]: I0929 19:25:05.034128 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4720cde-c7b5-4bf3-9aff-8daee7fe6211" path="/var/lib/kubelet/pods/c4720cde-c7b5-4bf3-9aff-8daee7fe6211/volumes"
Sep 29 19:25:09 crc kubenswrapper[4792]: I0929 19:25:09.022147 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7"
Sep 29 19:25:09 crc kubenswrapper[4792]: E0929 19:25:09.022793 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 19:25:13 crc kubenswrapper[4792]: I0929 19:25:13.028586 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-hr5x7"]
Sep 29 19:25:13 crc kubenswrapper[4792]: I0929 19:25:13.040548 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-hr5x7"]
Sep 29 19:25:15 crc kubenswrapper[4792]: I0929 19:25:15.025422 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f52c7bda-8340-49b2-9f94-2767ab141a81" path="/var/lib/kubelet/pods/f52c7bda-8340-49b2-9f94-2767ab141a81/volumes"
Sep 29 19:25:20 crc kubenswrapper[4792]: I0929 19:25:20.036713 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-1bfc-account-create-dfjx4"]
Sep 29 19:25:20 crc kubenswrapper[4792]: I0929 19:25:20.048186 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-2e1f-account-create-zc864"]
Sep 29 19:25:20 crc kubenswrapper[4792]: I0929 19:25:20.080114 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-9d5d-account-create-4l8xs"]
Sep 29 19:25:20 crc kubenswrapper[4792]: I0929 19:25:20.091417 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-1bfc-account-create-dfjx4"]
Sep 29 19:25:20 crc kubenswrapper[4792]: I0929 19:25:20.099456 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-2e1f-account-create-zc864"]
Sep 29 19:25:20 crc kubenswrapper[4792]: I0929 19:25:20.107212 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-9d5d-account-create-4l8xs"]
Sep 29 19:25:21 crc kubenswrapper[4792]: I0929 19:25:21.030248 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="305096ef-8cf0-4061-8153-17d2bbcb9e2b" path="/var/lib/kubelet/pods/305096ef-8cf0-4061-8153-17d2bbcb9e2b/volumes"
Sep 29 19:25:21 crc kubenswrapper[4792]: I0929 19:25:21.031614 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43a6587c-7796-4d5d-9f91-cff1dc257b26" path="/var/lib/kubelet/pods/43a6587c-7796-4d5d-9f91-cff1dc257b26/volumes"
Sep 29 19:25:21 crc kubenswrapper[4792]: I0929 19:25:21.033077 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d341539-6c17-493b-bd37-e03f9e186459" path="/var/lib/kubelet/pods/9d341539-6c17-493b-bd37-e03f9e186459/volumes"
Sep 29 19:25:22 crc kubenswrapper[4792]: I0929 19:25:22.032808 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-9mkf6"]
Sep 29 19:25:22 crc kubenswrapper[4792]: I0929 19:25:22.043670 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-9mkf6"]
Sep 29 19:25:23 crc kubenswrapper[4792]: I0929 19:25:23.038818 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e" path="/var/lib/kubelet/pods/ef8285b6-60ee-4b53-bb6a-ffb4c9a6bb1e/volumes"
Sep 29 19:25:24 crc kubenswrapper[4792]: I0929 19:25:24.015210 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7"
Sep 29 19:25:24 crc kubenswrapper[4792]: E0929 19:25:24.015819 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.006384 4792 scope.go:117] "RemoveContainer" containerID="e9c8879923b9d4a0ef247f86c02262810d140dbd1dfc395d1a47142fdc899868"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.031476 4792 scope.go:117] "RemoveContainer" containerID="5a7358fbfdb6b20ff36b0d3f3562868f8dc011eadd6b3d2525f2ddb6a6fd5939"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.079928 4792 scope.go:117] "RemoveContainer" containerID="574e87433c33c96465cfa0f0b6655078ed2e11e589a1246263fbe17665512d46"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.131388 4792 scope.go:117] "RemoveContainer" containerID="d63440b09d488d13b24a4c4f5f3881e107e8b29f3efd5358d84538a35eb37e77"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.166132 4792 scope.go:117] "RemoveContainer" containerID="af1cf705ed7ff0a8d8a3545337f7f1cc64c316a00f78da41cd59272bab8ae4aa"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.225510 4792 scope.go:117] "RemoveContainer" containerID="ddc9d28107fa91cdf470c188847390a41b636284fe7fd5d3c085cf389a63d428"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.263521 4792 scope.go:117] "RemoveContainer" containerID="f3e4079c2dc0435bfa60ed75b4ff57bd896c9cde399f1fac9497685694618adb"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.289140 4792 scope.go:117] "RemoveContainer" containerID="ff133d042b720fee1687e13a12024ebbae6d9dea5a29474449cbe5667c9bff5e"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.335176 4792 scope.go:117] "RemoveContainer" containerID="5a0b18fcc7d14c9991621e948f72d3109f320e075f37cc23c34f2fd7db77c3f7"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.358507 4792 scope.go:117] "RemoveContainer" containerID="87f8eb721d8b61854fb6eb07a02c7a138769598716b60771d768cb9b3442fcb6"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.381330 4792 scope.go:117] "RemoveContainer" containerID="ac5855c219ce190769c0ea0b9d071a413c64e2c845189e35bf197ccbdb65ccf1"
Sep 29 19:25:36 crc kubenswrapper[4792]: I0929 19:25:36.411719 4792 scope.go:117] "RemoveContainer" containerID="5deab9e68e296346f4cb638c1c76c1e575af121f11caef29da0e3a18b2f684c8"
Sep 29 19:25:39 crc kubenswrapper[4792]: I0929 19:25:39.023267 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7"
Sep 29 19:25:39 crc kubenswrapper[4792]: E0929 19:25:39.023672 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 19:25:54 crc kubenswrapper[4792]: I0929 19:25:54.016388 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7"
Sep 29 19:25:54 crc kubenswrapper[4792]: E0929 19:25:54.017325 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 19:25:59 crc kubenswrapper[4792]: I0929 19:25:59.046118 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-j6jb8"]
Sep 29 19:25:59 crc kubenswrapper[4792]: I0929 19:25:59.053605 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-j6jb8"]
Sep 29 19:26:01 crc kubenswrapper[4792]: I0929 19:26:01.028939 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72bc741a-4542-48fb-b65c-c7a12570d80a" path="/var/lib/kubelet/pods/72bc741a-4542-48fb-b65c-c7a12570d80a/volumes"
Sep 29 19:26:08 crc kubenswrapper[4792]: I0929 19:26:08.016130 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7"
Sep 29 19:26:08 crc kubenswrapper[4792]: E0929 19:26:08.016964 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 19:26:14 crc kubenswrapper[4792]: I0929 19:26:14.041868 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-qwkmr"]
Sep 29 19:26:14 crc kubenswrapper[4792]: I0929 19:26:14.052207 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-5bsn5"]
Sep 29 19:26:14 crc kubenswrapper[4792]: I0929 19:26:14.059679 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-qwkmr"]
Sep 29 19:26:14 crc kubenswrapper[4792]: I0929 19:26:14.068570 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-5bsn5"]
Sep 29 19:26:14 crc kubenswrapper[4792]: I0929 19:26:14.075155 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-5vlr7"]
Sep 29 19:26:14 crc kubenswrapper[4792]: I0929 19:26:14.081575 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-5vlr7"]
Sep 29 19:26:15 crc kubenswrapper[4792]: I0929 19:26:15.027150 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d12f836-d8e3-46a3-bc92-64dae426f114" path="/var/lib/kubelet/pods/1d12f836-d8e3-46a3-bc92-64dae426f114/volumes"
Sep 29 19:26:15 crc kubenswrapper[4792]: I0929 19:26:15.028292 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c3fc253-fb19-4845-a099-4754b7a55cdb" path="/var/lib/kubelet/pods/3c3fc253-fb19-4845-a099-4754b7a55cdb/volumes"
Sep 29 19:26:15 crc kubenswrapper[4792]: I0929 19:26:15.028923 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b45ae86a-64ce-47be-a568-021cf9da5107" path="/var/lib/kubelet/pods/b45ae86a-64ce-47be-a568-021cf9da5107/volumes"
Sep 29 19:26:20 crc kubenswrapper[4792]: I0929 19:26:20.015746 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7"
Sep 29 19:26:20 crc kubenswrapper[4792]: E0929 19:26:20.016568 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 19:26:26 crc kubenswrapper[4792]: I0929 19:26:26.089020 4792 generic.go:334] "Generic (PLEG): container finished" podID="c90a0d9d-bf42-4d49-9527-e859ffce83a0" containerID="46b93ef4c13808e6ba854fa43e617cf41fc9a1e4ddd747106d559ad049243ab5" exitCode=0
Sep 29 19:26:26 crc kubenswrapper[4792]: I0929 19:26:26.089144 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" event={"ID":"c90a0d9d-bf42-4d49-9527-e859ffce83a0","Type":"ContainerDied","Data":"46b93ef4c13808e6ba854fa43e617cf41fc9a1e4ddd747106d559ad049243ab5"}
Sep 29 19:26:27 crc kubenswrapper[4792]: I0929 19:26:27.515645 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj"
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:26:27 crc kubenswrapper[4792]: I0929 19:26:27.578016 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdb87\" (UniqueName: \"kubernetes.io/projected/c90a0d9d-bf42-4d49-9527-e859ffce83a0-kube-api-access-tdb87\") pod \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " Sep 29 19:26:27 crc kubenswrapper[4792]: I0929 19:26:27.578199 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-ssh-key\") pod \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " Sep 29 19:26:27 crc kubenswrapper[4792]: I0929 19:26:27.578438 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-inventory\") pod \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\" (UID: \"c90a0d9d-bf42-4d49-9527-e859ffce83a0\") " Sep 29 19:26:27 crc kubenswrapper[4792]: I0929 19:26:27.583314 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c90a0d9d-bf42-4d49-9527-e859ffce83a0-kube-api-access-tdb87" (OuterVolumeSpecName: "kube-api-access-tdb87") pod "c90a0d9d-bf42-4d49-9527-e859ffce83a0" (UID: "c90a0d9d-bf42-4d49-9527-e859ffce83a0"). InnerVolumeSpecName "kube-api-access-tdb87". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4792]: I0929 19:26:27.604268 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c90a0d9d-bf42-4d49-9527-e859ffce83a0" (UID: "c90a0d9d-bf42-4d49-9527-e859ffce83a0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4792]: I0929 19:26:27.623008 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-inventory" (OuterVolumeSpecName: "inventory") pod "c90a0d9d-bf42-4d49-9527-e859ffce83a0" (UID: "c90a0d9d-bf42-4d49-9527-e859ffce83a0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4792]: I0929 19:26:27.680158 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4792]: I0929 19:26:27.680186 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdb87\" (UniqueName: \"kubernetes.io/projected/c90a0d9d-bf42-4d49-9527-e859ffce83a0-kube-api-access-tdb87\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4792]: I0929 19:26:27.680196 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c90a0d9d-bf42-4d49-9527-e859ffce83a0-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.108456 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" event={"ID":"c90a0d9d-bf42-4d49-9527-e859ffce83a0","Type":"ContainerDied","Data":"643c706791bdd8f0f9443b8ada0d6fc8936fe411ab48afe71305ecbf1a2909ce"} Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.108764 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="643c706791bdd8f0f9443b8ada0d6fc8936fe411ab48afe71305ecbf1a2909ce" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.108494 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.268931 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv"] Sep 29 19:26:28 crc kubenswrapper[4792]: E0929 19:26:28.269296 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c90a0d9d-bf42-4d49-9527-e859ffce83a0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.269314 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c90a0d9d-bf42-4d49-9527-e859ffce83a0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.269504 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c90a0d9d-bf42-4d49-9527-e859ffce83a0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.270144 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.272606 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.272860 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.273144 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.273178 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.286257 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv"] Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.402371 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6xccv\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.402464 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmd6n\" (UniqueName: \"kubernetes.io/projected/723e4395-18dd-4729-be31-1c5ccf8e7ec8-kube-api-access-gmd6n\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6xccv\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.402551 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6xccv\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.504326 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmd6n\" (UniqueName: \"kubernetes.io/projected/723e4395-18dd-4729-be31-1c5ccf8e7ec8-kube-api-access-gmd6n\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6xccv\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.504405 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6xccv\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.504537 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6xccv\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.518547 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6xccv\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.519592 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6xccv\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.523994 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmd6n\" (UniqueName: \"kubernetes.io/projected/723e4395-18dd-4729-be31-1c5ccf8e7ec8-kube-api-access-gmd6n\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6xccv\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:28 crc kubenswrapper[4792]: I0929 19:26:28.595815 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:26:29 crc kubenswrapper[4792]: I0929 19:26:29.103606 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv"] Sep 29 19:26:29 crc kubenswrapper[4792]: I0929 19:26:29.120036 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" event={"ID":"723e4395-18dd-4729-be31-1c5ccf8e7ec8","Type":"ContainerStarted","Data":"9cfd956664095551d09ee3d9a20c467181dde26b535c0a7f8a6e1ddf298e896e"} Sep 29 19:26:29 crc kubenswrapper[4792]: I0929 19:26:29.291278 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:26:30 crc kubenswrapper[4792]: I0929 19:26:30.131180 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" event={"ID":"723e4395-18dd-4729-be31-1c5ccf8e7ec8","Type":"ContainerStarted","Data":"b707dce76d47c3f5f3fe7494b4313001cb8a41dd8a4242b64cf1874334029873"} Sep 29 19:26:30 crc kubenswrapper[4792]: I0929 19:26:30.152775 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" podStartSLOduration=1.976718752 podStartE2EDuration="2.152750837s" podCreationTimestamp="2025-09-29 19:26:28 +0000 UTC" firstStartedPulling="2025-09-29 19:26:29.112283806 +0000 UTC m=+1801.105591212" lastFinishedPulling="2025-09-29 19:26:29.288315881 +0000 UTC m=+1801.281623297" observedRunningTime="2025-09-29 19:26:30.150872608 +0000 UTC m=+1802.144180024" watchObservedRunningTime="2025-09-29 19:26:30.152750837 +0000 UTC m=+1802.146058253" Sep 29 19:26:32 crc kubenswrapper[4792]: I0929 19:26:32.016435 4792 scope.go:117] "RemoveContainer" 
containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:26:32 crc kubenswrapper[4792]: E0929 19:26:32.017313 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:26:32 crc kubenswrapper[4792]: I0929 19:26:32.062851 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-cvlgm"] Sep 29 19:26:32 crc kubenswrapper[4792]: I0929 19:26:32.076831 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-cvlgm"] Sep 29 19:26:33 crc kubenswrapper[4792]: I0929 19:26:33.035519 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd0405ab-8be9-41cd-aa4d-7cbe44be3049" path="/var/lib/kubelet/pods/bd0405ab-8be9-41cd-aa4d-7cbe44be3049/volumes" Sep 29 19:26:36 crc kubenswrapper[4792]: I0929 19:26:36.668012 4792 scope.go:117] "RemoveContainer" containerID="458f42dbafa406e8067bc4be0137d6c9a4b875c15a1507d9300b9941e91ba73f" Sep 29 19:26:36 crc kubenswrapper[4792]: I0929 19:26:36.706964 4792 scope.go:117] "RemoveContainer" containerID="6a5885959acd64b15fe20cfb3870e826808b5a9916e3c522a51fba99a1bcbcf7" Sep 29 19:26:36 crc kubenswrapper[4792]: I0929 19:26:36.770747 4792 scope.go:117] "RemoveContainer" containerID="669c7cc1f802e3c741ab812ebea14d9a705e2cf6d0558e155f535ddf3ec6e8fd" Sep 29 19:26:36 crc kubenswrapper[4792]: I0929 19:26:36.831553 4792 scope.go:117] "RemoveContainer" containerID="3feeb60329406e8936b0b75192cd78cb94d59aca2dca3a89c2f5d7219e9eeb9a" Sep 29 19:26:36 crc kubenswrapper[4792]: I0929 19:26:36.885478 4792 scope.go:117] "RemoveContainer" containerID="d603761937a93c10f270ab88e50f1fcce9c85642279a25c8fc9caf4866875be0" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.647012 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rf2dx"] Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.649674 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.680189 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rf2dx"] Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.709411 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-catalog-content\") pod \"redhat-operators-rf2dx\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.709503 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-utilities\") pod \"redhat-operators-rf2dx\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.709543 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpsh6\" (UniqueName: \"kubernetes.io/projected/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-kube-api-access-cpsh6\") pod \"redhat-operators-rf2dx\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.810518 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpsh6\" (UniqueName: \"kubernetes.io/projected/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-kube-api-access-cpsh6\") pod \"redhat-operators-rf2dx\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.810647 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-catalog-content\") pod \"redhat-operators-rf2dx\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.810759 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-utilities\") pod \"redhat-operators-rf2dx\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.811310 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-catalog-content\") pod \"redhat-operators-rf2dx\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.811424 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-utilities\") pod \"redhat-operators-rf2dx\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.837810 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-cpsh6\" (UniqueName: \"kubernetes.io/projected/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-kube-api-access-cpsh6\") pod \"redhat-operators-rf2dx\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:42 crc kubenswrapper[4792]: I0929 19:26:42.971067 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:43 crc kubenswrapper[4792]: I0929 19:26:43.455624 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rf2dx"] Sep 29 19:26:44 crc kubenswrapper[4792]: I0929 19:26:44.015623 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:26:44 crc kubenswrapper[4792]: E0929 19:26:44.016084 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:26:44 crc kubenswrapper[4792]: I0929 19:26:44.269321 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerID="b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885" exitCode=0 Sep 29 19:26:44 crc kubenswrapper[4792]: I0929 19:26:44.269713 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rf2dx" event={"ID":"0ad88da0-baf6-45a4-9a5a-cb6ace790db8","Type":"ContainerDied","Data":"b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885"} Sep 29 19:26:44 crc kubenswrapper[4792]: I0929 19:26:44.269743 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rf2dx" event={"ID":"0ad88da0-baf6-45a4-9a5a-cb6ace790db8","Type":"ContainerStarted","Data":"9f3d67e37c1c6791f959b4855367ac171c449dd7677278e2921c4d0343b6f7bf"} Sep 29 19:26:45 crc kubenswrapper[4792]: I0929 19:26:45.282818 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rf2dx" event={"ID":"0ad88da0-baf6-45a4-9a5a-cb6ace790db8","Type":"ContainerStarted","Data":"c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129"} Sep 29 19:26:50 crc kubenswrapper[4792]: I0929 19:26:50.323021 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerID="c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129" exitCode=0 Sep 29 19:26:50 crc kubenswrapper[4792]: I0929 19:26:50.323126 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rf2dx" event={"ID":"0ad88da0-baf6-45a4-9a5a-cb6ace790db8","Type":"ContainerDied","Data":"c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129"} Sep 29 19:26:51 crc kubenswrapper[4792]: I0929 19:26:51.334358 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rf2dx" event={"ID":"0ad88da0-baf6-45a4-9a5a-cb6ace790db8","Type":"ContainerStarted","Data":"7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5"} Sep 29 19:26:51 crc kubenswrapper[4792]: I0929 19:26:51.357910 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-rf2dx" podStartSLOduration=2.876439985 podStartE2EDuration="9.357892633s" podCreationTimestamp="2025-09-29 19:26:42 +0000 UTC" firstStartedPulling="2025-09-29 19:26:44.27195946 +0000 UTC m=+1816.265266866" lastFinishedPulling="2025-09-29 19:26:50.753412118 +0000 UTC m=+1822.746719514" observedRunningTime="2025-09-29 19:26:51.356460845 +0000 UTC m=+1823.349768241" watchObservedRunningTime="2025-09-29 19:26:51.357892633 +0000 UTC m=+1823.351200019" Sep 29 19:26:52 crc kubenswrapper[4792]: I0929 19:26:52.972745 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:52 crc kubenswrapper[4792]: I0929 19:26:52.973145 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:26:54 crc kubenswrapper[4792]: I0929 19:26:54.018237 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rf2dx" podUID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerName="registry-server" probeResult="failure" output=< Sep 29 19:26:54 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Sep 29 19:26:54 crc kubenswrapper[4792]: > Sep 29 19:26:55 crc kubenswrapper[4792]: I0929 19:26:55.019778 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:26:55 crc kubenswrapper[4792]: E0929 19:26:55.020356 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:27:03 crc kubenswrapper[4792]: I0929 19:27:03.024430 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:27:03 crc kubenswrapper[4792]: I0929 19:27:03.069624 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:27:03 crc kubenswrapper[4792]: I0929 19:27:03.266541 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rf2dx"] Sep 29 19:27:04 crc kubenswrapper[4792]: I0929 19:27:04.445759 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rf2dx" podUID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerName="registry-server" containerID="cri-o://7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5" gracePeriod=2 Sep 29 19:27:04 crc kubenswrapper[4792]: I0929 19:27:04.891710 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.027039 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-catalog-content\") pod \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.027238 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-utilities\") pod \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.027268 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpsh6\" (UniqueName: \"kubernetes.io/projected/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-kube-api-access-cpsh6\") pod \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\" (UID: \"0ad88da0-baf6-45a4-9a5a-cb6ace790db8\") " Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.027733 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-utilities" (OuterVolumeSpecName: "utilities") pod "0ad88da0-baf6-45a4-9a5a-cb6ace790db8" (UID: "0ad88da0-baf6-45a4-9a5a-cb6ace790db8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.042170 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-kube-api-access-cpsh6" (OuterVolumeSpecName: "kube-api-access-cpsh6") pod "0ad88da0-baf6-45a4-9a5a-cb6ace790db8" (UID: "0ad88da0-baf6-45a4-9a5a-cb6ace790db8"). InnerVolumeSpecName "kube-api-access-cpsh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.118516 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ad88da0-baf6-45a4-9a5a-cb6ace790db8" (UID: "0ad88da0-baf6-45a4-9a5a-cb6ace790db8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.130289 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.130323 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpsh6\" (UniqueName: \"kubernetes.io/projected/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-kube-api-access-cpsh6\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.130335 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ad88da0-baf6-45a4-9a5a-cb6ace790db8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.466765 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerID="7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5" exitCode=0 Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.466814 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rf2dx" event={"ID":"0ad88da0-baf6-45a4-9a5a-cb6ace790db8","Type":"ContainerDied","Data":"7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5"} Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.466843 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rf2dx" event={"ID":"0ad88da0-baf6-45a4-9a5a-cb6ace790db8","Type":"ContainerDied","Data":"9f3d67e37c1c6791f959b4855367ac171c449dd7677278e2921c4d0343b6f7bf"} Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.466881 4792 scope.go:117] "RemoveContainer" containerID="7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.466880 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rf2dx" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.490830 4792 scope.go:117] "RemoveContainer" containerID="c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.514095 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rf2dx"] Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.521593 4792 scope.go:117] "RemoveContainer" containerID="b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.523688 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rf2dx"] Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.576355 4792 scope.go:117] "RemoveContainer" containerID="7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5" Sep 29 19:27:05 crc kubenswrapper[4792]: E0929 19:27:05.576839 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5\": container with ID starting with 7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5 not found: ID does not exist" containerID="7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.576976 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5"} err="failed to get container status \"7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5\": rpc error: code = NotFound desc = could not find container \"7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5\": container with ID starting with 7f223205820631edcf72af12954d66130d5152d8a828520e9ebd3558fc0dc9c5 not found: ID does not exist" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.577066 4792 scope.go:117] "RemoveContainer" containerID="c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129" Sep 29 19:27:05 crc kubenswrapper[4792]: E0929 19:27:05.577558 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129\": container with ID starting with c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129 not found: ID does not exist" containerID="c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.577656 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129"} err="failed to get container status \"c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129\": rpc error: code = NotFound desc = could not find container \"c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129\": container with ID starting with c1de3bd10254d54edfae37c3ab1cc17a81510adc80e73ece077bb59e904e6129 not found: ID does not exist" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.577750 4792 scope.go:117] "RemoveContainer" containerID="b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885" Sep 29 19:27:05 crc kubenswrapper[4792]: E0929 19:27:05.578024 4792 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885\": container with ID starting with b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885 not found: ID does not exist" containerID="b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885" Sep 29 19:27:05 crc kubenswrapper[4792]: I0929 19:27:05.578123 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885"} err="failed to get container status \"b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885\": rpc error: code = NotFound desc = could not find container \"b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885\": container with ID starting with b83f40888ad487031db1af0e7dd2b6bae5b926ac59434033d41afdc51c24c885 not found: ID does not exist" Sep 29 19:27:07 crc kubenswrapper[4792]: I0929 19:27:07.026597 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" path="/var/lib/kubelet/pods/0ad88da0-baf6-45a4-9a5a-cb6ace790db8/volumes" Sep 29 19:27:09 crc kubenswrapper[4792]: I0929 19:27:09.023146 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:27:09 crc kubenswrapper[4792]: E0929 19:27:09.024657 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:27:24 crc kubenswrapper[4792]: I0929 19:27:24.015233 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:27:24 crc kubenswrapper[4792]: E0929 19:27:24.016013 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:27:28 crc kubenswrapper[4792]: I0929 19:27:28.049556 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-zsqnn"] Sep 29 19:27:28 crc kubenswrapper[4792]: I0929 19:27:28.057823 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-d6db5"] Sep 29 19:27:28 crc kubenswrapper[4792]: I0929 19:27:28.067010 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-nb9z4"] Sep 29 19:27:28 crc kubenswrapper[4792]: I0929 19:27:28.074403 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-nb9z4"] Sep 29 19:27:28 crc kubenswrapper[4792]: I0929 19:27:28.081497 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-d6db5"] Sep 29 19:27:28 crc kubenswrapper[4792]: I0929 19:27:28.089799 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-zsqnn"] Sep 29 19:27:29 crc kubenswrapper[4792]: I0929 
19:27:29.024768 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fe5fafb-0225-461a-9d77-b301c7459ec5" path="/var/lib/kubelet/pods/0fe5fafb-0225-461a-9d77-b301c7459ec5/volumes" Sep 29 19:27:29 crc kubenswrapper[4792]: I0929 19:27:29.025468 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6205e51a-b274-4cb0-8e37-4d094352a317" path="/var/lib/kubelet/pods/6205e51a-b274-4cb0-8e37-4d094352a317/volumes" Sep 29 19:27:29 crc kubenswrapper[4792]: I0929 19:27:29.026477 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9c562dc-e14c-4288-92c0-c03095f3d7a1" path="/var/lib/kubelet/pods/a9c562dc-e14c-4288-92c0-c03095f3d7a1/volumes" Sep 29 19:27:37 crc kubenswrapper[4792]: I0929 19:27:37.035800 4792 scope.go:117] "RemoveContainer" containerID="f0d4f77c8a55c9b252bc37c845fad8b83b4218e93ef5b362d62aed78c07f838d" Sep 29 19:27:37 crc kubenswrapper[4792]: I0929 19:27:37.065210 4792 scope.go:117] "RemoveContainer" containerID="757fa550617bf08b87a28c7008d19bcf691e256201ad1203a0fe098b6bc70946" Sep 29 19:27:37 crc kubenswrapper[4792]: I0929 19:27:37.106148 4792 scope.go:117] "RemoveContainer" containerID="6a0351fe58d5cc3e2344b58251acc870e52d5bae09ac185787b4c0b41e9a1874" Sep 29 19:27:38 crc kubenswrapper[4792]: I0929 19:27:38.015499 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:27:38 crc kubenswrapper[4792]: E0929 19:27:38.015726 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:27:38 crc kubenswrapper[4792]: I0929 19:27:38.035680 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db58-account-create-5gg6m"] Sep 29 19:27:38 crc kubenswrapper[4792]: I0929 19:27:38.042659 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-e097-account-create-qvhth"] Sep 29 19:27:38 crc kubenswrapper[4792]: I0929 19:27:38.049423 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db58-account-create-5gg6m"] Sep 29 19:27:38 crc kubenswrapper[4792]: I0929 19:27:38.057196 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-8647-account-create-j4tgd"] Sep 29 19:27:38 crc kubenswrapper[4792]: I0929 19:27:38.064055 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-e097-account-create-qvhth"] Sep 29 19:27:38 crc kubenswrapper[4792]: I0929 19:27:38.070589 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-8647-account-create-j4tgd"] Sep 29 19:27:39 crc kubenswrapper[4792]: I0929 19:27:39.026179 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc7f0299-1915-487b-8cf3-fb9215143c3e" path="/var/lib/kubelet/pods/bc7f0299-1915-487b-8cf3-fb9215143c3e/volumes" Sep 29 19:27:39 crc kubenswrapper[4792]: I0929 19:27:39.026888 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4df94be-b881-4404-a0e4-6f02a72ff60d" path="/var/lib/kubelet/pods/c4df94be-b881-4404-a0e4-6f02a72ff60d/volumes" Sep 29 19:27:39 crc kubenswrapper[4792]: I0929 19:27:39.027492 4792 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="e485ebed-017b-48d0-bb72-24571bb0ba2e" path="/var/lib/kubelet/pods/e485ebed-017b-48d0-bb72-24571bb0ba2e/volumes" Sep 29 19:27:45 crc kubenswrapper[4792]: I0929 19:27:45.856176 4792 generic.go:334] "Generic (PLEG): container finished" podID="723e4395-18dd-4729-be31-1c5ccf8e7ec8" containerID="b707dce76d47c3f5f3fe7494b4313001cb8a41dd8a4242b64cf1874334029873" exitCode=0 Sep 29 19:27:45 crc kubenswrapper[4792]: I0929 19:27:45.856268 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" event={"ID":"723e4395-18dd-4729-be31-1c5ccf8e7ec8","Type":"ContainerDied","Data":"b707dce76d47c3f5f3fe7494b4313001cb8a41dd8a4242b64cf1874334029873"} Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.243232 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.338148 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-ssh-key\") pod \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.338467 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmd6n\" (UniqueName: \"kubernetes.io/projected/723e4395-18dd-4729-be31-1c5ccf8e7ec8-kube-api-access-gmd6n\") pod \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.338579 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-inventory\") pod \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\" (UID: \"723e4395-18dd-4729-be31-1c5ccf8e7ec8\") " Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.343512 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/723e4395-18dd-4729-be31-1c5ccf8e7ec8-kube-api-access-gmd6n" (OuterVolumeSpecName: "kube-api-access-gmd6n") pod "723e4395-18dd-4729-be31-1c5ccf8e7ec8" (UID: "723e4395-18dd-4729-be31-1c5ccf8e7ec8"). InnerVolumeSpecName "kube-api-access-gmd6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.364019 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-inventory" (OuterVolumeSpecName: "inventory") pod "723e4395-18dd-4729-be31-1c5ccf8e7ec8" (UID: "723e4395-18dd-4729-be31-1c5ccf8e7ec8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.366147 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "723e4395-18dd-4729-be31-1c5ccf8e7ec8" (UID: "723e4395-18dd-4729-be31-1c5ccf8e7ec8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.440808 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmd6n\" (UniqueName: \"kubernetes.io/projected/723e4395-18dd-4729-be31-1c5ccf8e7ec8-kube-api-access-gmd6n\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.441195 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.441373 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/723e4395-18dd-4729-be31-1c5ccf8e7ec8-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.874083 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" event={"ID":"723e4395-18dd-4729-be31-1c5ccf8e7ec8","Type":"ContainerDied","Data":"9cfd956664095551d09ee3d9a20c467181dde26b535c0a7f8a6e1ddf298e896e"} Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.874132 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cfd956664095551d09ee3d9a20c467181dde26b535c0a7f8a6e1ddf298e896e" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.874133 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6xccv" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.951367 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd"] Sep 29 19:27:47 crc kubenswrapper[4792]: E0929 19:27:47.951812 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerName="registry-server" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.951833 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerName="registry-server" Sep 29 19:27:47 crc kubenswrapper[4792]: E0929 19:27:47.951880 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerName="extract-utilities" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.951892 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerName="extract-utilities" Sep 29 19:27:47 crc kubenswrapper[4792]: E0929 19:27:47.951914 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerName="extract-content" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.951922 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerName="extract-content" Sep 29 19:27:47 crc kubenswrapper[4792]: E0929 19:27:47.951936 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="723e4395-18dd-4729-be31-1c5ccf8e7ec8" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.951946 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="723e4395-18dd-4729-be31-1c5ccf8e7ec8" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.952164 4792 
memory_manager.go:354] "RemoveStaleState removing state" podUID="723e4395-18dd-4729-be31-1c5ccf8e7ec8" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.952193 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ad88da0-baf6-45a4-9a5a-cb6ace790db8" containerName="registry-server" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.952952 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.954428 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.955509 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.955512 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.956715 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:27:47 crc kubenswrapper[4792]: I0929 19:27:47.966143 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd"] Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.052924 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jkl9\" (UniqueName: \"kubernetes.io/projected/6040c28a-468b-4253-8a8f-8fc98326b48b-kube-api-access-9jkl9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-frsdd\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.053011 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-frsdd\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.053083 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-frsdd\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.154645 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-frsdd\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.154689 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-frsdd\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.154885 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jkl9\" (UniqueName: \"kubernetes.io/projected/6040c28a-468b-4253-8a8f-8fc98326b48b-kube-api-access-9jkl9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-frsdd\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.160112 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-frsdd\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.166368 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-frsdd\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.177924 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jkl9\" (UniqueName: \"kubernetes.io/projected/6040c28a-468b-4253-8a8f-8fc98326b48b-kube-api-access-9jkl9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-frsdd\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.270953 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.788090 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd"] Sep 29 19:27:48 crc kubenswrapper[4792]: I0929 19:27:48.884932 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" event={"ID":"6040c28a-468b-4253-8a8f-8fc98326b48b","Type":"ContainerStarted","Data":"6f4664c292c0d99f6300c9b73a3cd0df75013adecf74b3e8221f372b95608785"} Sep 29 19:27:49 crc kubenswrapper[4792]: I0929 19:27:49.893144 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" event={"ID":"6040c28a-468b-4253-8a8f-8fc98326b48b","Type":"ContainerStarted","Data":"67003c44aa8e1d1c6153988f037807bd39297c4a53042480c6c122a4a4a2842c"} Sep 29 19:27:49 crc kubenswrapper[4792]: I0929 19:27:49.923883 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" podStartSLOduration=2.725559007 podStartE2EDuration="2.923865004s" podCreationTimestamp="2025-09-29 19:27:47 +0000 UTC" firstStartedPulling="2025-09-29 19:27:48.794185228 +0000 UTC m=+1880.787492624" lastFinishedPulling="2025-09-29 19:27:48.992491225 +0000 UTC m=+1880.985798621" observedRunningTime="2025-09-29 19:27:49.9168229 +0000 UTC m=+1881.910130296" watchObservedRunningTime="2025-09-29 19:27:49.923865004 +0000 UTC m=+1881.917172400" Sep 29 19:27:52 crc kubenswrapper[4792]: I0929 19:27:52.015785 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:27:52 crc kubenswrapper[4792]: E0929 19:27:52.016563 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:27:54 crc kubenswrapper[4792]: I0929 19:27:54.940481 4792 generic.go:334] "Generic (PLEG): container finished" podID="6040c28a-468b-4253-8a8f-8fc98326b48b" containerID="67003c44aa8e1d1c6153988f037807bd39297c4a53042480c6c122a4a4a2842c" exitCode=0 Sep 29 19:27:54 crc kubenswrapper[4792]: I0929 19:27:54.940590 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" event={"ID":"6040c28a-468b-4253-8a8f-8fc98326b48b","Type":"ContainerDied","Data":"67003c44aa8e1d1c6153988f037807bd39297c4a53042480c6c122a4a4a2842c"} Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.389481 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.532933 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jkl9\" (UniqueName: \"kubernetes.io/projected/6040c28a-468b-4253-8a8f-8fc98326b48b-kube-api-access-9jkl9\") pod \"6040c28a-468b-4253-8a8f-8fc98326b48b\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.533118 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-inventory\") pod \"6040c28a-468b-4253-8a8f-8fc98326b48b\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.533204 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-ssh-key\") pod \"6040c28a-468b-4253-8a8f-8fc98326b48b\" (UID: \"6040c28a-468b-4253-8a8f-8fc98326b48b\") " Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.540575 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6040c28a-468b-4253-8a8f-8fc98326b48b-kube-api-access-9jkl9" (OuterVolumeSpecName: "kube-api-access-9jkl9") pod "6040c28a-468b-4253-8a8f-8fc98326b48b" (UID: "6040c28a-468b-4253-8a8f-8fc98326b48b"). InnerVolumeSpecName "kube-api-access-9jkl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.562242 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6040c28a-468b-4253-8a8f-8fc98326b48b" (UID: "6040c28a-468b-4253-8a8f-8fc98326b48b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.563569 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-inventory" (OuterVolumeSpecName: "inventory") pod "6040c28a-468b-4253-8a8f-8fc98326b48b" (UID: "6040c28a-468b-4253-8a8f-8fc98326b48b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.636771 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jkl9\" (UniqueName: \"kubernetes.io/projected/6040c28a-468b-4253-8a8f-8fc98326b48b-kube-api-access-9jkl9\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.636815 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.636828 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6040c28a-468b-4253-8a8f-8fc98326b48b-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.960321 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" event={"ID":"6040c28a-468b-4253-8a8f-8fc98326b48b","Type":"ContainerDied","Data":"6f4664c292c0d99f6300c9b73a3cd0df75013adecf74b3e8221f372b95608785"} Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.960691 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f4664c292c0d99f6300c9b73a3cd0df75013adecf74b3e8221f372b95608785" Sep 29 19:27:56 crc kubenswrapper[4792]: I0929 19:27:56.960424 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-frsdd" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.090094 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh"] Sep 29 19:27:57 crc kubenswrapper[4792]: E0929 19:27:57.090696 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6040c28a-468b-4253-8a8f-8fc98326b48b" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.090721 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6040c28a-468b-4253-8a8f-8fc98326b48b" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.091326 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6040c28a-468b-4253-8a8f-8fc98326b48b" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.096600 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.099281 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.099513 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.099800 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.102323 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh"] Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.110129 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.246803 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ppdnh\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.247181 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ppdnh\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.247431 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55smk\" (UniqueName: \"kubernetes.io/projected/af95758e-6a40-4679-ba1c-8ebf988f1865-kube-api-access-55smk\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ppdnh\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.349177 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55smk\" (UniqueName: \"kubernetes.io/projected/af95758e-6a40-4679-ba1c-8ebf988f1865-kube-api-access-55smk\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ppdnh\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.349247 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ppdnh\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.349286 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ppdnh\" (UID: 
\"af95758e-6a40-4679-ba1c-8ebf988f1865\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.353544 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ppdnh\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.353567 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ppdnh\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.372562 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55smk\" (UniqueName: \"kubernetes.io/projected/af95758e-6a40-4679-ba1c-8ebf988f1865-kube-api-access-55smk\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ppdnh\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.429450 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:27:57 crc kubenswrapper[4792]: I0929 19:27:57.965397 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh"] Sep 29 19:27:58 crc kubenswrapper[4792]: I0929 19:27:58.980424 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" event={"ID":"af95758e-6a40-4679-ba1c-8ebf988f1865","Type":"ContainerStarted","Data":"244f30e73e138783b4fc25308b8f2a05bbf90de07d06045aefd04ee77ea2191d"} Sep 29 19:27:58 crc kubenswrapper[4792]: I0929 19:27:58.980772 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" event={"ID":"af95758e-6a40-4679-ba1c-8ebf988f1865","Type":"ContainerStarted","Data":"2b46644e04e3fa3a7334602ecb4b1ae751a5397da8c3b6f7c93159b081397b84"} Sep 29 19:27:59 crc kubenswrapper[4792]: I0929 19:27:59.002462 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" podStartSLOduration=1.84463038 podStartE2EDuration="2.002439738s" podCreationTimestamp="2025-09-29 19:27:57 +0000 UTC" firstStartedPulling="2025-09-29 19:27:57.985973752 +0000 UTC m=+1889.979281198" lastFinishedPulling="2025-09-29 19:27:58.14378314 +0000 UTC m=+1890.137090556" observedRunningTime="2025-09-29 19:27:58.996369379 +0000 UTC m=+1890.989676775" watchObservedRunningTime="2025-09-29 19:27:59.002439738 +0000 UTC m=+1890.995747134" Sep 29 19:28:01 crc kubenswrapper[4792]: I0929 19:28:01.051302 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k2c4z"] Sep 29 19:28:01 crc kubenswrapper[4792]: I0929 19:28:01.064495 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k2c4z"] Sep 29 19:28:03 crc kubenswrapper[4792]: I0929 19:28:03.034551 4792 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="b3ee2364-5037-46f9-88b2-abb515fdc1b0" path="/var/lib/kubelet/pods/b3ee2364-5037-46f9-88b2-abb515fdc1b0/volumes" Sep 29 19:28:06 crc kubenswrapper[4792]: I0929 19:28:06.016011 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:28:06 crc kubenswrapper[4792]: E0929 19:28:06.016640 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:28:17 crc kubenswrapper[4792]: I0929 19:28:17.015558 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:28:17 crc kubenswrapper[4792]: E0929 19:28:17.016311 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:28:25 crc kubenswrapper[4792]: I0929 19:28:25.071040 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pfgjk"] Sep 29 19:28:25 crc kubenswrapper[4792]: I0929 19:28:25.086705 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-zzmwh"] Sep 29 19:28:25 crc kubenswrapper[4792]: I0929 19:28:25.110350 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-zzmwh"] Sep 29 19:28:25 crc kubenswrapper[4792]: I0929 19:28:25.114627 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pfgjk"] Sep 29 19:28:27 crc kubenswrapper[4792]: I0929 19:28:27.029443 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e141a44-a3f2-4d4c-af13-e6ca3d76ea01" path="/var/lib/kubelet/pods/3e141a44-a3f2-4d4c-af13-e6ca3d76ea01/volumes" Sep 29 19:28:27 crc kubenswrapper[4792]: I0929 19:28:27.032720 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="683f150a-35bc-423d-af3a-005d3a967d88" path="/var/lib/kubelet/pods/683f150a-35bc-423d-af3a-005d3a967d88/volumes" Sep 29 19:28:31 crc kubenswrapper[4792]: I0929 19:28:31.015799 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:28:31 crc kubenswrapper[4792]: E0929 19:28:31.016705 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:28:37 crc kubenswrapper[4792]: I0929 19:28:37.239812 4792 scope.go:117] "RemoveContainer" containerID="31bc47d37f16098e5e073a6f949770c00105b4895e34cd4ab1ea5f3bf0a9734a" Sep 29 19:28:37 crc kubenswrapper[4792]: 
I0929 19:28:37.259766 4792 scope.go:117] "RemoveContainer" containerID="437cffbc7687f5e1387fea4c43fdd36e1183bf1e81416221b44a8a6819c55e17" Sep 29 19:28:37 crc kubenswrapper[4792]: I0929 19:28:37.341184 4792 scope.go:117] "RemoveContainer" containerID="a66e8d41695d0d35ba48561d3bf9e2a54e0b4d480b4091ce7a54ec48f6bb7ebd" Sep 29 19:28:37 crc kubenswrapper[4792]: I0929 19:28:37.375177 4792 scope.go:117] "RemoveContainer" containerID="92fc46631ef693a3f819c878eda51eb6a3b4a7dbfc255d6e415ea9ff92df2819" Sep 29 19:28:37 crc kubenswrapper[4792]: I0929 19:28:37.433892 4792 scope.go:117] "RemoveContainer" containerID="3a0687b6f4e62cf46bfd15ea6f7a91fadb4104a6dc851ae3bed5ebebff951b41" Sep 29 19:28:37 crc kubenswrapper[4792]: I0929 19:28:37.455749 4792 scope.go:117] "RemoveContainer" containerID="22dd093354f323ca6a47071b3b9926c7c358e28bf7d19edd123f99a117a26596" Sep 29 19:28:44 crc kubenswrapper[4792]: I0929 19:28:44.015279 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:28:44 crc kubenswrapper[4792]: E0929 19:28:44.016035 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:28:44 crc kubenswrapper[4792]: I0929 19:28:44.479905 4792 generic.go:334] "Generic (PLEG): container finished" podID="af95758e-6a40-4679-ba1c-8ebf988f1865" containerID="244f30e73e138783b4fc25308b8f2a05bbf90de07d06045aefd04ee77ea2191d" exitCode=0 Sep 29 19:28:44 crc kubenswrapper[4792]: I0929 19:28:44.479947 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" event={"ID":"af95758e-6a40-4679-ba1c-8ebf988f1865","Type":"ContainerDied","Data":"244f30e73e138783b4fc25308b8f2a05bbf90de07d06045aefd04ee77ea2191d"} Sep 29 19:28:45 crc kubenswrapper[4792]: I0929 19:28:45.921211 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:28:45 crc kubenswrapper[4792]: I0929 19:28:45.998430 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-inventory\") pod \"af95758e-6a40-4679-ba1c-8ebf988f1865\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " Sep 29 19:28:45 crc kubenswrapper[4792]: I0929 19:28:45.998584 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55smk\" (UniqueName: \"kubernetes.io/projected/af95758e-6a40-4679-ba1c-8ebf988f1865-kube-api-access-55smk\") pod \"af95758e-6a40-4679-ba1c-8ebf988f1865\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " Sep 29 19:28:45 crc kubenswrapper[4792]: I0929 19:28:45.998651 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-ssh-key\") pod \"af95758e-6a40-4679-ba1c-8ebf988f1865\" (UID: \"af95758e-6a40-4679-ba1c-8ebf988f1865\") " Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.004419 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af95758e-6a40-4679-ba1c-8ebf988f1865-kube-api-access-55smk" (OuterVolumeSpecName: "kube-api-access-55smk") pod "af95758e-6a40-4679-ba1c-8ebf988f1865" (UID: "af95758e-6a40-4679-ba1c-8ebf988f1865"). InnerVolumeSpecName "kube-api-access-55smk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.026637 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-inventory" (OuterVolumeSpecName: "inventory") pod "af95758e-6a40-4679-ba1c-8ebf988f1865" (UID: "af95758e-6a40-4679-ba1c-8ebf988f1865"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.029459 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "af95758e-6a40-4679-ba1c-8ebf988f1865" (UID: "af95758e-6a40-4679-ba1c-8ebf988f1865"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.101419 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.101457 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55smk\" (UniqueName: \"kubernetes.io/projected/af95758e-6a40-4679-ba1c-8ebf988f1865-kube-api-access-55smk\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.101469 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af95758e-6a40-4679-ba1c-8ebf988f1865-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.501902 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" event={"ID":"af95758e-6a40-4679-ba1c-8ebf988f1865","Type":"ContainerDied","Data":"2b46644e04e3fa3a7334602ecb4b1ae751a5397da8c3b6f7c93159b081397b84"} Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.501951 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ppdnh" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.501955 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b46644e04e3fa3a7334602ecb4b1ae751a5397da8c3b6f7c93159b081397b84" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.595571 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt"] Sep 29 19:28:46 crc kubenswrapper[4792]: E0929 19:28:46.595928 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af95758e-6a40-4679-ba1c-8ebf988f1865" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.595948 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="af95758e-6a40-4679-ba1c-8ebf988f1865" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.596212 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="af95758e-6a40-4679-ba1c-8ebf988f1865" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.596787 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.599213 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.599518 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.599688 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.600249 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.617416 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt"] Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.712770 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql77x\" (UniqueName: \"kubernetes.io/projected/5d30a56f-01e0-422e-99bd-08328d009094-kube-api-access-ql77x\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.712812 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.713184 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.815603 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.815699 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql77x\" (UniqueName: \"kubernetes.io/projected/5d30a56f-01e0-422e-99bd-08328d009094-kube-api-access-ql77x\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.815727 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt\" 
(UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.826445 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.827210 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.833393 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ql77x\" (UniqueName: \"kubernetes.io/projected/5d30a56f-01e0-422e-99bd-08328d009094-kube-api-access-ql77x\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:46 crc kubenswrapper[4792]: I0929 19:28:46.917586 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:28:47 crc kubenswrapper[4792]: I0929 19:28:47.452783 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt"] Sep 29 19:28:47 crc kubenswrapper[4792]: I0929 19:28:47.512563 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" event={"ID":"5d30a56f-01e0-422e-99bd-08328d009094","Type":"ContainerStarted","Data":"651341d3a021a4b375c647fad7f665b8bd6f4f1342bde3c6e0b51aaa5e152e0e"} Sep 29 19:28:48 crc kubenswrapper[4792]: I0929 19:28:48.524563 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" event={"ID":"5d30a56f-01e0-422e-99bd-08328d009094","Type":"ContainerStarted","Data":"371013bbd4c583e292e4a8dd66bcee6345caaa4539b3c4b4b660690fc959f8cc"} Sep 29 19:28:48 crc kubenswrapper[4792]: I0929 19:28:48.546748 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" podStartSLOduration=2.393613386 podStartE2EDuration="2.54672982s" podCreationTimestamp="2025-09-29 19:28:46 +0000 UTC" firstStartedPulling="2025-09-29 19:28:47.457872983 +0000 UTC m=+1939.451180409" lastFinishedPulling="2025-09-29 19:28:47.610989447 +0000 UTC m=+1939.604296843" observedRunningTime="2025-09-29 19:28:48.541456912 +0000 UTC m=+1940.534764318" watchObservedRunningTime="2025-09-29 19:28:48.54672982 +0000 UTC m=+1940.540037216" Sep 29 19:28:57 crc kubenswrapper[4792]: I0929 19:28:57.015067 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:28:57 crc kubenswrapper[4792]: E0929 19:28:57.015755 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:29:10 crc kubenswrapper[4792]: I0929 19:29:10.016754 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:29:10 crc kubenswrapper[4792]: E0929 19:29:10.017638 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:29:10 crc kubenswrapper[4792]: I0929 19:29:10.051884 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-w6z2z"] Sep 29 19:29:10 crc kubenswrapper[4792]: I0929 19:29:10.061332 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-w6z2z"] Sep 29 19:29:11 crc kubenswrapper[4792]: I0929 19:29:11.025550 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94b5373f-d637-47ab-90eb-3d83e2a38886" path="/var/lib/kubelet/pods/94b5373f-d637-47ab-90eb-3d83e2a38886/volumes" Sep 29 19:29:23 crc kubenswrapper[4792]: I0929 19:29:23.016407 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:29:23 crc kubenswrapper[4792]: E0929 19:29:23.017385 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:29:37 crc kubenswrapper[4792]: I0929 19:29:37.651808 4792 scope.go:117] "RemoveContainer" containerID="48dd98b540a3269fd78905a33f974f35a2c8eb8711921ab4ad616c802dbdd61e" Sep 29 19:29:38 crc kubenswrapper[4792]: I0929 19:29:38.015163 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:29:38 crc kubenswrapper[4792]: E0929 19:29:38.015537 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:29:48 crc kubenswrapper[4792]: I0929 19:29:48.022843 4792 generic.go:334] "Generic (PLEG): container finished" podID="5d30a56f-01e0-422e-99bd-08328d009094" containerID="371013bbd4c583e292e4a8dd66bcee6345caaa4539b3c4b4b660690fc959f8cc" exitCode=0 Sep 29 19:29:48 crc kubenswrapper[4792]: I0929 19:29:48.022886 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" 
event={"ID":"5d30a56f-01e0-422e-99bd-08328d009094","Type":"ContainerDied","Data":"371013bbd4c583e292e4a8dd66bcee6345caaa4539b3c4b4b660690fc959f8cc"} Sep 29 19:29:49 crc kubenswrapper[4792]: I0929 19:29:49.427587 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:29:49 crc kubenswrapper[4792]: I0929 19:29:49.616676 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-inventory\") pod \"5d30a56f-01e0-422e-99bd-08328d009094\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " Sep 29 19:29:49 crc kubenswrapper[4792]: I0929 19:29:49.616758 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-ssh-key\") pod \"5d30a56f-01e0-422e-99bd-08328d009094\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " Sep 29 19:29:49 crc kubenswrapper[4792]: I0929 19:29:49.616791 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ql77x\" (UniqueName: \"kubernetes.io/projected/5d30a56f-01e0-422e-99bd-08328d009094-kube-api-access-ql77x\") pod \"5d30a56f-01e0-422e-99bd-08328d009094\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " Sep 29 19:29:49 crc kubenswrapper[4792]: I0929 19:29:49.624628 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d30a56f-01e0-422e-99bd-08328d009094-kube-api-access-ql77x" (OuterVolumeSpecName: "kube-api-access-ql77x") pod "5d30a56f-01e0-422e-99bd-08328d009094" (UID: "5d30a56f-01e0-422e-99bd-08328d009094"). InnerVolumeSpecName "kube-api-access-ql77x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:29:49 crc kubenswrapper[4792]: E0929 19:29:49.640276 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-inventory podName:5d30a56f-01e0-422e-99bd-08328d009094 nodeName:}" failed. No retries permitted until 2025-09-29 19:29:50.140200366 +0000 UTC m=+2002.133507762 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-inventory") pod "5d30a56f-01e0-422e-99bd-08328d009094" (UID: "5d30a56f-01e0-422e-99bd-08328d009094") : error deleting /var/lib/kubelet/pods/5d30a56f-01e0-422e-99bd-08328d009094/volume-subpaths: remove /var/lib/kubelet/pods/5d30a56f-01e0-422e-99bd-08328d009094/volume-subpaths: no such file or directory Sep 29 19:29:49 crc kubenswrapper[4792]: I0929 19:29:49.642691 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5d30a56f-01e0-422e-99bd-08328d009094" (UID: "5d30a56f-01e0-422e-99bd-08328d009094"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:29:49 crc kubenswrapper[4792]: I0929 19:29:49.719350 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:49 crc kubenswrapper[4792]: I0929 19:29:49.719376 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ql77x\" (UniqueName: \"kubernetes.io/projected/5d30a56f-01e0-422e-99bd-08328d009094-kube-api-access-ql77x\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.042686 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" event={"ID":"5d30a56f-01e0-422e-99bd-08328d009094","Type":"ContainerDied","Data":"651341d3a021a4b375c647fad7f665b8bd6f4f1342bde3c6e0b51aaa5e152e0e"} Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.042768 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.042787 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="651341d3a021a4b375c647fad7f665b8bd6f4f1342bde3c6e0b51aaa5e152e0e" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.139442 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-5w5vz"] Sep 29 19:29:50 crc kubenswrapper[4792]: E0929 19:29:50.139780 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d30a56f-01e0-422e-99bd-08328d009094" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.139796 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d30a56f-01e0-422e-99bd-08328d009094" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.140008 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d30a56f-01e0-422e-99bd-08328d009094" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.140755 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.152107 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-5w5vz"] Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.230019 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-inventory\") pod \"5d30a56f-01e0-422e-99bd-08328d009094\" (UID: \"5d30a56f-01e0-422e-99bd-08328d009094\") " Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.230471 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fng94\" (UniqueName: \"kubernetes.io/projected/352317f9-484f-4680-aea0-8ebf9c6b4e44-kube-api-access-fng94\") pod \"ssh-known-hosts-edpm-deployment-5w5vz\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.230717 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-5w5vz\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.230892 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-5w5vz\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.233437 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-inventory" (OuterVolumeSpecName: "inventory") pod "5d30a56f-01e0-422e-99bd-08328d009094" (UID: "5d30a56f-01e0-422e-99bd-08328d009094"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.332962 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fng94\" (UniqueName: \"kubernetes.io/projected/352317f9-484f-4680-aea0-8ebf9c6b4e44-kube-api-access-fng94\") pod \"ssh-known-hosts-edpm-deployment-5w5vz\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.333339 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-5w5vz\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.333487 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-5w5vz\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.333642 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d30a56f-01e0-422e-99bd-08328d009094-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.336545 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-5w5vz\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.343451 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-5w5vz\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.352581 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fng94\" (UniqueName: \"kubernetes.io/projected/352317f9-484f-4680-aea0-8ebf9c6b4e44-kube-api-access-fng94\") pod \"ssh-known-hosts-edpm-deployment-5w5vz\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.472801 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:29:50 crc kubenswrapper[4792]: I0929 19:29:50.998806 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-5w5vz"] Sep 29 19:29:51 crc kubenswrapper[4792]: I0929 19:29:51.002837 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:29:51 crc kubenswrapper[4792]: I0929 19:29:51.018711 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7" Sep 29 19:29:51 crc kubenswrapper[4792]: I0929 19:29:51.053499 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" event={"ID":"352317f9-484f-4680-aea0-8ebf9c6b4e44","Type":"ContainerStarted","Data":"cff162afcda1885b130e8846bb78e7fde4694bf2b2d80dd6336e3528154fd4a1"} Sep 29 19:29:52 crc kubenswrapper[4792]: I0929 19:29:52.063369 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"b984d9990ebb9d14104b77ed41e9db98bc311c1e21b0dba6547f2b2dea1a040c"} Sep 29 19:29:52 crc kubenswrapper[4792]: I0929 19:29:52.065939 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" event={"ID":"352317f9-484f-4680-aea0-8ebf9c6b4e44","Type":"ContainerStarted","Data":"bfdca5149f60d3861040e07c5984c136d380618abb97a4ada763bf330eb5538e"} Sep 29 19:29:52 crc kubenswrapper[4792]: I0929 19:29:52.130990 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" podStartSLOduration=1.907910037 podStartE2EDuration="2.130969341s" podCreationTimestamp="2025-09-29 19:29:50 +0000 UTC" firstStartedPulling="2025-09-29 19:29:51.002544657 +0000 UTC m=+2002.995852073" lastFinishedPulling="2025-09-29 19:29:51.225603981 +0000 UTC m=+2003.218911377" observedRunningTime="2025-09-29 19:29:52.120827845 +0000 UTC m=+2004.114135251" watchObservedRunningTime="2025-09-29 19:29:52.130969341 +0000 UTC m=+2004.124276747" Sep 29 19:29:59 crc kubenswrapper[4792]: I0929 19:29:59.118575 4792 generic.go:334] "Generic (PLEG): container finished" podID="352317f9-484f-4680-aea0-8ebf9c6b4e44" containerID="bfdca5149f60d3861040e07c5984c136d380618abb97a4ada763bf330eb5538e" exitCode=0 Sep 29 19:29:59 crc kubenswrapper[4792]: I0929 19:29:59.119019 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" event={"ID":"352317f9-484f-4680-aea0-8ebf9c6b4e44","Type":"ContainerDied","Data":"bfdca5149f60d3861040e07c5984c136d380618abb97a4ada763bf330eb5538e"} Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.143518 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd"] Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.146421 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.149207 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.149909 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.153450 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd"] Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.239232 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/584d7b6c-450e-4739-91c4-6a89c1ab2487-config-volume\") pod \"collect-profiles-29319570-dc9rd\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.239292 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/584d7b6c-450e-4739-91c4-6a89c1ab2487-secret-volume\") pod \"collect-profiles-29319570-dc9rd\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.239435 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf96r\" (UniqueName: \"kubernetes.io/projected/584d7b6c-450e-4739-91c4-6a89c1ab2487-kube-api-access-tf96r\") pod \"collect-profiles-29319570-dc9rd\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.340449 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/584d7b6c-450e-4739-91c4-6a89c1ab2487-secret-volume\") pod \"collect-profiles-29319570-dc9rd\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.340599 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf96r\" (UniqueName: \"kubernetes.io/projected/584d7b6c-450e-4739-91c4-6a89c1ab2487-kube-api-access-tf96r\") pod \"collect-profiles-29319570-dc9rd\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.340658 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/584d7b6c-450e-4739-91c4-6a89c1ab2487-config-volume\") pod \"collect-profiles-29319570-dc9rd\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.341538 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/584d7b6c-450e-4739-91c4-6a89c1ab2487-config-volume\") pod 
\"collect-profiles-29319570-dc9rd\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.351025 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/584d7b6c-450e-4739-91c4-6a89c1ab2487-secret-volume\") pod \"collect-profiles-29319570-dc9rd\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.360480 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf96r\" (UniqueName: \"kubernetes.io/projected/584d7b6c-450e-4739-91c4-6a89c1ab2487-kube-api-access-tf96r\") pod \"collect-profiles-29319570-dc9rd\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.468939 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.541768 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.645560 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fng94\" (UniqueName: \"kubernetes.io/projected/352317f9-484f-4680-aea0-8ebf9c6b4e44-kube-api-access-fng94\") pod \"352317f9-484f-4680-aea0-8ebf9c6b4e44\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.645828 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-ssh-key-openstack-edpm-ipam\") pod \"352317f9-484f-4680-aea0-8ebf9c6b4e44\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.645906 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-inventory-0\") pod \"352317f9-484f-4680-aea0-8ebf9c6b4e44\" (UID: \"352317f9-484f-4680-aea0-8ebf9c6b4e44\") " Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.651750 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/352317f9-484f-4680-aea0-8ebf9c6b4e44-kube-api-access-fng94" (OuterVolumeSpecName: "kube-api-access-fng94") pod "352317f9-484f-4680-aea0-8ebf9c6b4e44" (UID: "352317f9-484f-4680-aea0-8ebf9c6b4e44"). InnerVolumeSpecName "kube-api-access-fng94". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.687563 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "352317f9-484f-4680-aea0-8ebf9c6b4e44" (UID: "352317f9-484f-4680-aea0-8ebf9c6b4e44"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.706792 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "352317f9-484f-4680-aea0-8ebf9c6b4e44" (UID: "352317f9-484f-4680-aea0-8ebf9c6b4e44"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.749259 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.749311 4792 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/352317f9-484f-4680-aea0-8ebf9c6b4e44-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.749325 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fng94\" (UniqueName: \"kubernetes.io/projected/352317f9-484f-4680-aea0-8ebf9c6b4e44-kube-api-access-fng94\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:00 crc kubenswrapper[4792]: I0929 19:30:00.941726 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd"] Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.139168 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" event={"ID":"584d7b6c-450e-4739-91c4-6a89c1ab2487","Type":"ContainerStarted","Data":"338313961c095e2453e882321216d21854e5164a510a62e4344d0e002e3d24b1"} Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.139479 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" event={"ID":"584d7b6c-450e-4739-91c4-6a89c1ab2487","Type":"ContainerStarted","Data":"4a1b43e048221b117bb2ee1a53dfbcd8cf5c4d5a520a595f7ad86375c0a6492b"} Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.141537 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" event={"ID":"352317f9-484f-4680-aea0-8ebf9c6b4e44","Type":"ContainerDied","Data":"cff162afcda1885b130e8846bb78e7fde4694bf2b2d80dd6336e3528154fd4a1"} Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.141562 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cff162afcda1885b130e8846bb78e7fde4694bf2b2d80dd6336e3528154fd4a1" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.141712 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5w5vz" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.157987 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" podStartSLOduration=1.157971475 podStartE2EDuration="1.157971475s" podCreationTimestamp="2025-09-29 19:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:30:01.155357426 +0000 UTC m=+2013.148664842" watchObservedRunningTime="2025-09-29 19:30:01.157971475 +0000 UTC m=+2013.151278871" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.208448 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8"] Sep 29 19:30:01 crc kubenswrapper[4792]: E0929 19:30:01.209183 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="352317f9-484f-4680-aea0-8ebf9c6b4e44" containerName="ssh-known-hosts-edpm-deployment" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.209209 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="352317f9-484f-4680-aea0-8ebf9c6b4e44" containerName="ssh-known-hosts-edpm-deployment" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.209437 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="352317f9-484f-4680-aea0-8ebf9c6b4e44" containerName="ssh-known-hosts-edpm-deployment" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.210256 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.214204 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.214329 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.214386 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.217526 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.242806 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8"] Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.257742 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-89vq8\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.257802 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-89vq8\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.257841 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5hzl\" (UniqueName: \"kubernetes.io/projected/c9424cca-92f5-490d-9a25-5feaa7010200-kube-api-access-p5hzl\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-89vq8\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.359971 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-89vq8\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.360023 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-89vq8\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.360080 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5hzl\" (UniqueName: \"kubernetes.io/projected/c9424cca-92f5-490d-9a25-5feaa7010200-kube-api-access-p5hzl\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-89vq8\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.366868 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-89vq8\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.367039 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-89vq8\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.379627 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5hzl\" (UniqueName: \"kubernetes.io/projected/c9424cca-92f5-490d-9a25-5feaa7010200-kube-api-access-p5hzl\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-89vq8\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:01 crc kubenswrapper[4792]: I0929 19:30:01.541281 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:02 crc kubenswrapper[4792]: I0929 19:30:02.075747 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8"] Sep 29 19:30:02 crc kubenswrapper[4792]: W0929 19:30:02.081700 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9424cca_92f5_490d_9a25_5feaa7010200.slice/crio-b28185dd53abdc3edd53d3cbb76b3ab8333b63b1df8649c8cdb0c4bc73e12444 WatchSource:0}: Error finding container b28185dd53abdc3edd53d3cbb76b3ab8333b63b1df8649c8cdb0c4bc73e12444: Status 404 returned error can't find the container with id b28185dd53abdc3edd53d3cbb76b3ab8333b63b1df8649c8cdb0c4bc73e12444 Sep 29 19:30:02 crc kubenswrapper[4792]: I0929 19:30:02.158565 4792 generic.go:334] "Generic (PLEG): container finished" podID="584d7b6c-450e-4739-91c4-6a89c1ab2487" containerID="338313961c095e2453e882321216d21854e5164a510a62e4344d0e002e3d24b1" exitCode=0 Sep 29 19:30:02 crc kubenswrapper[4792]: I0929 19:30:02.158715 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" event={"ID":"584d7b6c-450e-4739-91c4-6a89c1ab2487","Type":"ContainerDied","Data":"338313961c095e2453e882321216d21854e5164a510a62e4344d0e002e3d24b1"} Sep 29 19:30:02 crc kubenswrapper[4792]: I0929 19:30:02.160495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" event={"ID":"c9424cca-92f5-490d-9a25-5feaa7010200","Type":"ContainerStarted","Data":"b28185dd53abdc3edd53d3cbb76b3ab8333b63b1df8649c8cdb0c4bc73e12444"} Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.169367 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" event={"ID":"c9424cca-92f5-490d-9a25-5feaa7010200","Type":"ContainerStarted","Data":"d491b7019e5d4742a9a4909f83bea77474efefd193f616fc72a1c7fd264a684f"} Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.188310 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" podStartSLOduration=1.993995094 podStartE2EDuration="2.188294116s" podCreationTimestamp="2025-09-29 19:30:01 +0000 UTC" firstStartedPulling="2025-09-29 19:30:02.084956839 +0000 UTC m=+2014.078264235" lastFinishedPulling="2025-09-29 19:30:02.279255861 +0000 UTC m=+2014.272563257" observedRunningTime="2025-09-29 19:30:03.185079352 +0000 UTC m=+2015.178386748" watchObservedRunningTime="2025-09-29 19:30:03.188294116 +0000 UTC m=+2015.181601512" Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.503488 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.624702 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/584d7b6c-450e-4739-91c4-6a89c1ab2487-config-volume\") pod \"584d7b6c-450e-4739-91c4-6a89c1ab2487\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.624822 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/584d7b6c-450e-4739-91c4-6a89c1ab2487-secret-volume\") pod \"584d7b6c-450e-4739-91c4-6a89c1ab2487\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.625517 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tf96r\" (UniqueName: \"kubernetes.io/projected/584d7b6c-450e-4739-91c4-6a89c1ab2487-kube-api-access-tf96r\") pod \"584d7b6c-450e-4739-91c4-6a89c1ab2487\" (UID: \"584d7b6c-450e-4739-91c4-6a89c1ab2487\") " Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.625698 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/584d7b6c-450e-4739-91c4-6a89c1ab2487-config-volume" (OuterVolumeSpecName: "config-volume") pod "584d7b6c-450e-4739-91c4-6a89c1ab2487" (UID: "584d7b6c-450e-4739-91c4-6a89c1ab2487"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.626451 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/584d7b6c-450e-4739-91c4-6a89c1ab2487-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.632290 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/584d7b6c-450e-4739-91c4-6a89c1ab2487-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "584d7b6c-450e-4739-91c4-6a89c1ab2487" (UID: "584d7b6c-450e-4739-91c4-6a89c1ab2487"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.641487 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/584d7b6c-450e-4739-91c4-6a89c1ab2487-kube-api-access-tf96r" (OuterVolumeSpecName: "kube-api-access-tf96r") pod "584d7b6c-450e-4739-91c4-6a89c1ab2487" (UID: "584d7b6c-450e-4739-91c4-6a89c1ab2487"). InnerVolumeSpecName "kube-api-access-tf96r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.727832 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tf96r\" (UniqueName: \"kubernetes.io/projected/584d7b6c-450e-4739-91c4-6a89c1ab2487-kube-api-access-tf96r\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:03 crc kubenswrapper[4792]: I0929 19:30:03.727918 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/584d7b6c-450e-4739-91c4-6a89c1ab2487-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:04 crc kubenswrapper[4792]: I0929 19:30:04.180749 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" Sep 29 19:30:04 crc kubenswrapper[4792]: I0929 19:30:04.180796 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd" event={"ID":"584d7b6c-450e-4739-91c4-6a89c1ab2487","Type":"ContainerDied","Data":"4a1b43e048221b117bb2ee1a53dfbcd8cf5c4d5a520a595f7ad86375c0a6492b"} Sep 29 19:30:04 crc kubenswrapper[4792]: I0929 19:30:04.180838 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a1b43e048221b117bb2ee1a53dfbcd8cf5c4d5a520a595f7ad86375c0a6492b" Sep 29 19:30:04 crc kubenswrapper[4792]: I0929 19:30:04.268286 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh"] Sep 29 19:30:04 crc kubenswrapper[4792]: I0929 19:30:04.274015 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319525-kwqwh"] Sep 29 19:30:05 crc kubenswrapper[4792]: I0929 19:30:05.026538 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5fc467b-0e4c-4f20-9729-56906756b33d" path="/var/lib/kubelet/pods/a5fc467b-0e4c-4f20-9729-56906756b33d/volumes" Sep 29 19:30:13 crc kubenswrapper[4792]: I0929 19:30:13.279169 4792 generic.go:334] "Generic (PLEG): container finished" podID="c9424cca-92f5-490d-9a25-5feaa7010200" containerID="d491b7019e5d4742a9a4909f83bea77474efefd193f616fc72a1c7fd264a684f" exitCode=0 Sep 29 19:30:13 crc kubenswrapper[4792]: I0929 19:30:13.279280 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" event={"ID":"c9424cca-92f5-490d-9a25-5feaa7010200","Type":"ContainerDied","Data":"d491b7019e5d4742a9a4909f83bea77474efefd193f616fc72a1c7fd264a684f"} Sep 29 19:30:14 crc kubenswrapper[4792]: I0929 19:30:14.731019 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:14 crc kubenswrapper[4792]: I0929 19:30:14.872156 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-ssh-key\") pod \"c9424cca-92f5-490d-9a25-5feaa7010200\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " Sep 29 19:30:14 crc kubenswrapper[4792]: I0929 19:30:14.872239 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-inventory\") pod \"c9424cca-92f5-490d-9a25-5feaa7010200\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " Sep 29 19:30:14 crc kubenswrapper[4792]: I0929 19:30:14.872368 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5hzl\" (UniqueName: \"kubernetes.io/projected/c9424cca-92f5-490d-9a25-5feaa7010200-kube-api-access-p5hzl\") pod \"c9424cca-92f5-490d-9a25-5feaa7010200\" (UID: \"c9424cca-92f5-490d-9a25-5feaa7010200\") " Sep 29 19:30:14 crc kubenswrapper[4792]: I0929 19:30:14.887641 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9424cca-92f5-490d-9a25-5feaa7010200-kube-api-access-p5hzl" (OuterVolumeSpecName: "kube-api-access-p5hzl") pod "c9424cca-92f5-490d-9a25-5feaa7010200" (UID: "c9424cca-92f5-490d-9a25-5feaa7010200"). InnerVolumeSpecName "kube-api-access-p5hzl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:30:14 crc kubenswrapper[4792]: I0929 19:30:14.904353 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c9424cca-92f5-490d-9a25-5feaa7010200" (UID: "c9424cca-92f5-490d-9a25-5feaa7010200"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:14 crc kubenswrapper[4792]: I0929 19:30:14.907285 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-inventory" (OuterVolumeSpecName: "inventory") pod "c9424cca-92f5-490d-9a25-5feaa7010200" (UID: "c9424cca-92f5-490d-9a25-5feaa7010200"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:14 crc kubenswrapper[4792]: I0929 19:30:14.974475 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:14 crc kubenswrapper[4792]: I0929 19:30:14.974500 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9424cca-92f5-490d-9a25-5feaa7010200-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:14 crc kubenswrapper[4792]: I0929 19:30:14.974511 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5hzl\" (UniqueName: \"kubernetes.io/projected/c9424cca-92f5-490d-9a25-5feaa7010200-kube-api-access-p5hzl\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.299504 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" event={"ID":"c9424cca-92f5-490d-9a25-5feaa7010200","Type":"ContainerDied","Data":"b28185dd53abdc3edd53d3cbb76b3ab8333b63b1df8649c8cdb0c4bc73e12444"} Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.299548 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b28185dd53abdc3edd53d3cbb76b3ab8333b63b1df8649c8cdb0c4bc73e12444" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.299596 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-89vq8" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.404251 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb"] Sep 29 19:30:15 crc kubenswrapper[4792]: E0929 19:30:15.404822 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9424cca-92f5-490d-9a25-5feaa7010200" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.404854 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9424cca-92f5-490d-9a25-5feaa7010200" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:15 crc kubenswrapper[4792]: E0929 19:30:15.404925 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="584d7b6c-450e-4739-91c4-6a89c1ab2487" containerName="collect-profiles" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.404934 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="584d7b6c-450e-4739-91c4-6a89c1ab2487" containerName="collect-profiles" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.405162 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="584d7b6c-450e-4739-91c4-6a89c1ab2487" containerName="collect-profiles" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.405197 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9424cca-92f5-490d-9a25-5feaa7010200" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.406322 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.411828 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.412146 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.412834 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.413119 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.420671 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb"] Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.483474 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-25chb\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.483555 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wv5h\" (UniqueName: \"kubernetes.io/projected/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-kube-api-access-6wv5h\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-25chb\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " 
pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.483629 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-25chb\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.588662 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-25chb\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.588845 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wv5h\" (UniqueName: \"kubernetes.io/projected/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-kube-api-access-6wv5h\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-25chb\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.589074 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-25chb\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.601532 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-25chb\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.603507 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-25chb\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.610124 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wv5h\" (UniqueName: \"kubernetes.io/projected/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-kube-api-access-6wv5h\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-25chb\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:15 crc kubenswrapper[4792]: I0929 19:30:15.741151 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:16 crc kubenswrapper[4792]: I0929 19:30:16.304226 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb"] Sep 29 19:30:16 crc kubenswrapper[4792]: W0929 19:30:16.322460 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c2d29d5_9c65_4cb4_b66d_aeffaff2201f.slice/crio-79cac87220b68f56e5ed9650d36dd0c744ceaa03d1f7d67700c36bcbcb496352 WatchSource:0}: Error finding container 79cac87220b68f56e5ed9650d36dd0c744ceaa03d1f7d67700c36bcbcb496352: Status 404 returned error can't find the container with id 79cac87220b68f56e5ed9650d36dd0c744ceaa03d1f7d67700c36bcbcb496352 Sep 29 19:30:17 crc kubenswrapper[4792]: I0929 19:30:17.320747 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" event={"ID":"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f","Type":"ContainerStarted","Data":"104a75bc502085d88ca42051cc819939724b11a9409268a8b7a54b5f080bc6f1"} Sep 29 19:30:17 crc kubenswrapper[4792]: I0929 19:30:17.321089 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" event={"ID":"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f","Type":"ContainerStarted","Data":"79cac87220b68f56e5ed9650d36dd0c744ceaa03d1f7d67700c36bcbcb496352"} Sep 29 19:30:17 crc kubenswrapper[4792]: I0929 19:30:17.342855 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" podStartSLOduration=2.17625043 podStartE2EDuration="2.342834577s" podCreationTimestamp="2025-09-29 19:30:15 +0000 UTC" firstStartedPulling="2025-09-29 19:30:16.32857972 +0000 UTC m=+2028.321887116" lastFinishedPulling="2025-09-29 19:30:16.495163867 +0000 UTC m=+2028.488471263" observedRunningTime="2025-09-29 19:30:17.33721946 +0000 UTC m=+2029.330526856" watchObservedRunningTime="2025-09-29 19:30:17.342834577 +0000 UTC m=+2029.336141973" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.056405 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-22p5m"] Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.060070 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.083050 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-22p5m"] Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.206558 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-utilities\") pod \"community-operators-22p5m\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.206637 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4r8n\" (UniqueName: \"kubernetes.io/projected/af51ad14-2117-48ef-875b-c1da5013d1a9-kube-api-access-v4r8n\") pod \"community-operators-22p5m\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.206848 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-catalog-content\") pod \"community-operators-22p5m\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.308588 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-catalog-content\") pod \"community-operators-22p5m\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.308628 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-utilities\") pod \"community-operators-22p5m\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.308664 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4r8n\" (UniqueName: \"kubernetes.io/projected/af51ad14-2117-48ef-875b-c1da5013d1a9-kube-api-access-v4r8n\") pod \"community-operators-22p5m\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.309181 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-catalog-content\") pod \"community-operators-22p5m\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.309211 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-utilities\") pod \"community-operators-22p5m\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.327943 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-v4r8n\" (UniqueName: \"kubernetes.io/projected/af51ad14-2117-48ef-875b-c1da5013d1a9-kube-api-access-v4r8n\") pod \"community-operators-22p5m\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.397619 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:21 crc kubenswrapper[4792]: I0929 19:30:21.912787 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-22p5m"] Sep 29 19:30:21 crc kubenswrapper[4792]: W0929 19:30:21.918421 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf51ad14_2117_48ef_875b_c1da5013d1a9.slice/crio-515ef96460f1510a167280fe1f21c52245a87b8eeb9d1f9db784156c1e7c028d WatchSource:0}: Error finding container 515ef96460f1510a167280fe1f21c52245a87b8eeb9d1f9db784156c1e7c028d: Status 404 returned error can't find the container with id 515ef96460f1510a167280fe1f21c52245a87b8eeb9d1f9db784156c1e7c028d Sep 29 19:30:22 crc kubenswrapper[4792]: I0929 19:30:22.390125 4792 generic.go:334] "Generic (PLEG): container finished" podID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerID="707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304" exitCode=0 Sep 29 19:30:22 crc kubenswrapper[4792]: I0929 19:30:22.390384 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-22p5m" event={"ID":"af51ad14-2117-48ef-875b-c1da5013d1a9","Type":"ContainerDied","Data":"707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304"} Sep 29 19:30:22 crc kubenswrapper[4792]: I0929 19:30:22.390410 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-22p5m" event={"ID":"af51ad14-2117-48ef-875b-c1da5013d1a9","Type":"ContainerStarted","Data":"515ef96460f1510a167280fe1f21c52245a87b8eeb9d1f9db784156c1e7c028d"} Sep 29 19:30:24 crc kubenswrapper[4792]: I0929 19:30:24.408986 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-22p5m" event={"ID":"af51ad14-2117-48ef-875b-c1da5013d1a9","Type":"ContainerStarted","Data":"67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a"} Sep 29 19:30:25 crc kubenswrapper[4792]: I0929 19:30:25.425770 4792 generic.go:334] "Generic (PLEG): container finished" podID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerID="67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a" exitCode=0 Sep 29 19:30:25 crc kubenswrapper[4792]: I0929 19:30:25.425880 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-22p5m" event={"ID":"af51ad14-2117-48ef-875b-c1da5013d1a9","Type":"ContainerDied","Data":"67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a"} Sep 29 19:30:26 crc kubenswrapper[4792]: I0929 19:30:26.438564 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-22p5m" event={"ID":"af51ad14-2117-48ef-875b-c1da5013d1a9","Type":"ContainerStarted","Data":"cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406"} Sep 29 19:30:26 crc kubenswrapper[4792]: I0929 19:30:26.455794 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-22p5m" 
podStartSLOduration=1.95239992 podStartE2EDuration="5.455779309s" podCreationTimestamp="2025-09-29 19:30:21 +0000 UTC" firstStartedPulling="2025-09-29 19:30:22.391975513 +0000 UTC m=+2034.385282909" lastFinishedPulling="2025-09-29 19:30:25.895354902 +0000 UTC m=+2037.888662298" observedRunningTime="2025-09-29 19:30:26.455484501 +0000 UTC m=+2038.448791897" watchObservedRunningTime="2025-09-29 19:30:26.455779309 +0000 UTC m=+2038.449086705" Sep 29 19:30:27 crc kubenswrapper[4792]: I0929 19:30:27.449361 4792 generic.go:334] "Generic (PLEG): container finished" podID="3c2d29d5-9c65-4cb4-b66d-aeffaff2201f" containerID="104a75bc502085d88ca42051cc819939724b11a9409268a8b7a54b5f080bc6f1" exitCode=0 Sep 29 19:30:27 crc kubenswrapper[4792]: I0929 19:30:27.449452 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" event={"ID":"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f","Type":"ContainerDied","Data":"104a75bc502085d88ca42051cc819939724b11a9409268a8b7a54b5f080bc6f1"} Sep 29 19:30:28 crc kubenswrapper[4792]: I0929 19:30:28.930082 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.100259 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-ssh-key\") pod \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.100710 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wv5h\" (UniqueName: \"kubernetes.io/projected/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-kube-api-access-6wv5h\") pod \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.100842 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-inventory\") pod \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\" (UID: \"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f\") " Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.106081 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-kube-api-access-6wv5h" (OuterVolumeSpecName: "kube-api-access-6wv5h") pod "3c2d29d5-9c65-4cb4-b66d-aeffaff2201f" (UID: "3c2d29d5-9c65-4cb4-b66d-aeffaff2201f"). InnerVolumeSpecName "kube-api-access-6wv5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.127414 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-inventory" (OuterVolumeSpecName: "inventory") pod "3c2d29d5-9c65-4cb4-b66d-aeffaff2201f" (UID: "3c2d29d5-9c65-4cb4-b66d-aeffaff2201f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.128006 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3c2d29d5-9c65-4cb4-b66d-aeffaff2201f" (UID: "3c2d29d5-9c65-4cb4-b66d-aeffaff2201f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.203888 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wv5h\" (UniqueName: \"kubernetes.io/projected/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-kube-api-access-6wv5h\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.203999 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.204014 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c2d29d5-9c65-4cb4-b66d-aeffaff2201f-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.508657 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" event={"ID":"3c2d29d5-9c65-4cb4-b66d-aeffaff2201f","Type":"ContainerDied","Data":"79cac87220b68f56e5ed9650d36dd0c744ceaa03d1f7d67700c36bcbcb496352"} Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.508995 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79cac87220b68f56e5ed9650d36dd0c744ceaa03d1f7d67700c36bcbcb496352" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.508737 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-25chb" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.634411 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv"] Sep 29 19:30:29 crc kubenswrapper[4792]: E0929 19:30:29.634898 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c2d29d5-9c65-4cb4-b66d-aeffaff2201f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.634918 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c2d29d5-9c65-4cb4-b66d-aeffaff2201f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.635164 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c2d29d5-9c65-4cb4-b66d-aeffaff2201f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.636057 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.638408 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.642954 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.643190 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.643318 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.643423 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.643555 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.643697 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.643776 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.652503 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv"] Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712363 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712405 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712430 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsd2w\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-kube-api-access-wsd2w\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712459 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712494 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712511 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712529 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712557 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712577 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712599 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712625 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712651 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712697 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.712720 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.814694 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.814739 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.814767 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.814796 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.814823 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.814901 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.814934 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.814976 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.815006 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.815033 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsd2w\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-kube-api-access-wsd2w\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.815075 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.815116 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: 
\"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.815136 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.815159 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.819749 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.820339 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.821574 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.822387 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.823714 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.824411 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.824597 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.825316 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.825992 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.826042 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.826349 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.827408 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.832135 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.834502 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsd2w\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-kube-api-access-wsd2w\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:29 crc kubenswrapper[4792]: I0929 19:30:29.954838 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:30:30 crc kubenswrapper[4792]: I0929 19:30:30.554958 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv"] Sep 29 19:30:30 crc kubenswrapper[4792]: W0929 19:30:30.558075 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ee2a07b_5943_4517_be5e_e1803f9d8a55.slice/crio-c2faaeb53b127fb156a36eea9052acea95082e95f487b36e3fff25428279febb WatchSource:0}: Error finding container c2faaeb53b127fb156a36eea9052acea95082e95f487b36e3fff25428279febb: Status 404 returned error can't find the container with id c2faaeb53b127fb156a36eea9052acea95082e95f487b36e3fff25428279febb Sep 29 19:30:31 crc kubenswrapper[4792]: I0929 19:30:31.398329 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:31 crc kubenswrapper[4792]: I0929 19:30:31.399830 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:31 crc kubenswrapper[4792]: I0929 19:30:31.449060 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:31 crc kubenswrapper[4792]: I0929 19:30:31.529173 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" event={"ID":"6ee2a07b-5943-4517-be5e-e1803f9d8a55","Type":"ContainerStarted","Data":"2a0b874e0c6222183d5ff5eaaab4f1eb52591e83ca7bc809e4bef029b953e907"} Sep 29 19:30:31 crc kubenswrapper[4792]: I0929 19:30:31.529218 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" event={"ID":"6ee2a07b-5943-4517-be5e-e1803f9d8a55","Type":"ContainerStarted","Data":"c2faaeb53b127fb156a36eea9052acea95082e95f487b36e3fff25428279febb"} Sep 29 19:30:31 crc kubenswrapper[4792]: I0929 19:30:31.548699 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" podStartSLOduration=2.3738878789999998 podStartE2EDuration="2.54867793s" podCreationTimestamp="2025-09-29 19:30:29 +0000 UTC" firstStartedPulling="2025-09-29 19:30:30.559772106 +0000 UTC m=+2042.553079502" lastFinishedPulling="2025-09-29 19:30:30.734562157 +0000 UTC m=+2042.727869553" observedRunningTime="2025-09-29 19:30:31.545563819 +0000 UTC m=+2043.538871215" watchObservedRunningTime="2025-09-29 19:30:31.54867793 +0000 UTC m=+2043.541985326" Sep 29 19:30:31 crc kubenswrapper[4792]: I0929 19:30:31.570593 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:31 crc kubenswrapper[4792]: I0929 19:30:31.683920 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-22p5m"] Sep 29 19:30:33 crc kubenswrapper[4792]: I0929 19:30:33.548019 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-22p5m" podUID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerName="registry-server" containerID="cri-o://cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406" gracePeriod=2 Sep 29 19:30:33 crc kubenswrapper[4792]: I0929 19:30:33.984162 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.100045 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-catalog-content\") pod \"af51ad14-2117-48ef-875b-c1da5013d1a9\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.100172 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-utilities\") pod \"af51ad14-2117-48ef-875b-c1da5013d1a9\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.100258 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4r8n\" (UniqueName: \"kubernetes.io/projected/af51ad14-2117-48ef-875b-c1da5013d1a9-kube-api-access-v4r8n\") pod \"af51ad14-2117-48ef-875b-c1da5013d1a9\" (UID: \"af51ad14-2117-48ef-875b-c1da5013d1a9\") " Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.101395 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-utilities" (OuterVolumeSpecName: "utilities") pod "af51ad14-2117-48ef-875b-c1da5013d1a9" (UID: "af51ad14-2117-48ef-875b-c1da5013d1a9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.106149 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af51ad14-2117-48ef-875b-c1da5013d1a9-kube-api-access-v4r8n" (OuterVolumeSpecName: "kube-api-access-v4r8n") pod "af51ad14-2117-48ef-875b-c1da5013d1a9" (UID: "af51ad14-2117-48ef-875b-c1da5013d1a9"). InnerVolumeSpecName "kube-api-access-v4r8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.145895 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af51ad14-2117-48ef-875b-c1da5013d1a9" (UID: "af51ad14-2117-48ef-875b-c1da5013d1a9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.202792 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4r8n\" (UniqueName: \"kubernetes.io/projected/af51ad14-2117-48ef-875b-c1da5013d1a9-kube-api-access-v4r8n\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.202825 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.202834 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af51ad14-2117-48ef-875b-c1da5013d1a9-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.560918 4792 generic.go:334] "Generic (PLEG): container finished" podID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerID="cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406" exitCode=0 Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.560958 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-22p5m" event={"ID":"af51ad14-2117-48ef-875b-c1da5013d1a9","Type":"ContainerDied","Data":"cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406"} Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.560990 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-22p5m" event={"ID":"af51ad14-2117-48ef-875b-c1da5013d1a9","Type":"ContainerDied","Data":"515ef96460f1510a167280fe1f21c52245a87b8eeb9d1f9db784156c1e7c028d"} Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.561007 4792 scope.go:117] "RemoveContainer" containerID="cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.561053 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-22p5m" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.587970 4792 scope.go:117] "RemoveContainer" containerID="67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.613917 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-22p5m"] Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.620273 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-22p5m"] Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.631251 4792 scope.go:117] "RemoveContainer" containerID="707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.674434 4792 scope.go:117] "RemoveContainer" containerID="cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406" Sep 29 19:30:34 crc kubenswrapper[4792]: E0929 19:30:34.674908 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406\": container with ID starting with cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406 not found: ID does not exist" containerID="cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.674950 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406"} err="failed to get container status \"cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406\": rpc error: code = NotFound desc = could not find container \"cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406\": container with ID starting with cdcded7e81be963fa67123a58ef19b8265b1b6e7d296d792cee04665aee55406 not found: ID does not exist" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.674978 4792 scope.go:117] "RemoveContainer" containerID="67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a" Sep 29 19:30:34 crc kubenswrapper[4792]: E0929 19:30:34.675426 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a\": container with ID starting with 67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a not found: ID does not exist" containerID="67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.675515 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a"} err="failed to get container status \"67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a\": rpc error: code = NotFound desc = could not find container \"67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a\": container with ID starting with 67c8bc1e1dbc67613142f5751677a78902b3963268cd53588bcd1f01df68ae7a not found: ID does not exist" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.675590 4792 scope.go:117] "RemoveContainer" containerID="707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304" Sep 29 19:30:34 crc kubenswrapper[4792]: E0929 19:30:34.676074 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304\": container with ID starting with 707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304 not found: ID does not exist" containerID="707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304" Sep 29 19:30:34 crc kubenswrapper[4792]: I0929 19:30:34.676147 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304"} err="failed to get container status \"707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304\": rpc error: code = NotFound desc = could not find container \"707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304\": container with ID starting with 707ae6073bf1a9855d9bcbbe81db0c13233dc90d3fd51df92434d817f9589304 not found: ID does not exist" Sep 29 19:30:35 crc kubenswrapper[4792]: I0929 19:30:35.028191 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af51ad14-2117-48ef-875b-c1da5013d1a9" path="/var/lib/kubelet/pods/af51ad14-2117-48ef-875b-c1da5013d1a9/volumes" Sep 29 19:30:37 crc kubenswrapper[4792]: I0929 19:30:37.740897 4792 scope.go:117] "RemoveContainer" containerID="826268197f27b99f4272acfc3e772be0e0fad546ed949d05bf541bdd9b1984bc" Sep 29 19:31:15 crc kubenswrapper[4792]: I0929 19:31:15.936319 4792 generic.go:334] "Generic (PLEG): container finished" podID="6ee2a07b-5943-4517-be5e-e1803f9d8a55" containerID="2a0b874e0c6222183d5ff5eaaab4f1eb52591e83ca7bc809e4bef029b953e907" exitCode=0 Sep 29 19:31:15 crc kubenswrapper[4792]: I0929 19:31:15.936416 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" event={"ID":"6ee2a07b-5943-4517-be5e-e1803f9d8a55","Type":"ContainerDied","Data":"2a0b874e0c6222183d5ff5eaaab4f1eb52591e83ca7bc809e4bef029b953e907"} Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.386562 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.462571 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ovn-combined-ca-bundle\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.462633 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-libvirt-combined-ca-bundle\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.462656 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-nova-combined-ca-bundle\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.462726 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-inventory\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.462788 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.462876 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-neutron-metadata-combined-ca-bundle\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.462970 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.462997 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.463016 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsd2w\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-kube-api-access-wsd2w\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: 
\"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.463058 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-telemetry-combined-ca-bundle\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.463096 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-ovn-default-certs-0\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.463114 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ssh-key\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.463137 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-bootstrap-combined-ca-bundle\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.463165 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-repo-setup-combined-ca-bundle\") pod \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\" (UID: \"6ee2a07b-5943-4517-be5e-e1803f9d8a55\") " Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.470633 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.471544 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.471703 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.472117 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.472862 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.472985 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.474069 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.475005 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.475028 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.475012 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.476382 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.490040 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-kube-api-access-wsd2w" (OuterVolumeSpecName: "kube-api-access-wsd2w") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "kube-api-access-wsd2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.500055 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-inventory" (OuterVolumeSpecName: "inventory") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.518183 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6ee2a07b-5943-4517-be5e-e1803f9d8a55" (UID: "6ee2a07b-5943-4517-be5e-e1803f9d8a55"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565421 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565452 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565466 4792 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565476 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565488 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565500 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsd2w\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-kube-api-access-wsd2w\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565509 4792 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565519 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6ee2a07b-5943-4517-be5e-e1803f9d8a55-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565529 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565540 4792 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565549 4792 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565558 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565569 4792 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.565581 4792 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ee2a07b-5943-4517-be5e-e1803f9d8a55-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.957418 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" event={"ID":"6ee2a07b-5943-4517-be5e-e1803f9d8a55","Type":"ContainerDied","Data":"c2faaeb53b127fb156a36eea9052acea95082e95f487b36e3fff25428279febb"} Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.957457 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2faaeb53b127fb156a36eea9052acea95082e95f487b36e3fff25428279febb" Sep 29 19:31:17 crc kubenswrapper[4792]: I0929 19:31:17.957517 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.087790 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g"] Sep 29 19:31:18 crc kubenswrapper[4792]: E0929 19:31:18.088513 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerName="extract-utilities" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.088596 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerName="extract-utilities" Sep 29 19:31:18 crc kubenswrapper[4792]: E0929 19:31:18.088701 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerName="registry-server" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.088769 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerName="registry-server" Sep 29 19:31:18 crc kubenswrapper[4792]: E0929 19:31:18.088861 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ee2a07b-5943-4517-be5e-e1803f9d8a55" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.088961 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ee2a07b-5943-4517-be5e-e1803f9d8a55" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 19:31:18 crc kubenswrapper[4792]: E0929 19:31:18.089081 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerName="extract-content" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.089180 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerName="extract-content" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.089460 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="af51ad14-2117-48ef-875b-c1da5013d1a9" containerName="registry-server" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 
19:31:18.089552 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ee2a07b-5943-4517-be5e-e1803f9d8a55" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.090414 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.093396 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.093587 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.093779 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.093783 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.093993 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.103914 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g"] Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.177732 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.177893 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.177926 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.177953 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbgdx\" (UniqueName: \"kubernetes.io/projected/c694fcd6-bd39-4ec9-9b52-536c53bfff92-kube-api-access-bbgdx\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.177990 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovn-combined-ca-bundle\") pod 
\"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.280080 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.280360 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.280455 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbgdx\" (UniqueName: \"kubernetes.io/projected/c694fcd6-bd39-4ec9-9b52-536c53bfff92-kube-api-access-bbgdx\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.280564 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.280713 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.281093 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.284671 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.284826 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 
29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.289173 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.297777 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbgdx\" (UniqueName: \"kubernetes.io/projected/c694fcd6-bd39-4ec9-9b52-536c53bfff92-kube-api-access-bbgdx\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vx68g\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:18 crc kubenswrapper[4792]: I0929 19:31:18.462599 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" Sep 29 19:31:19 crc kubenswrapper[4792]: I0929 19:31:19.011120 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g"] Sep 29 19:31:19 crc kubenswrapper[4792]: I0929 19:31:19.977938 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" event={"ID":"c694fcd6-bd39-4ec9-9b52-536c53bfff92","Type":"ContainerStarted","Data":"68c7e9c6d9fe92a597022c714c0af86e3afc59d2ebded93d333f5da73fbbccb9"} Sep 29 19:31:19 crc kubenswrapper[4792]: I0929 19:31:19.979615 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" event={"ID":"c694fcd6-bd39-4ec9-9b52-536c53bfff92","Type":"ContainerStarted","Data":"4075dd8c85969985721bbee05dd8dc98135d672160d64d2cda2a2169e653abbf"} Sep 29 19:31:20 crc kubenswrapper[4792]: I0929 19:31:20.003634 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" podStartSLOduration=1.833619999 podStartE2EDuration="2.003607965s" podCreationTimestamp="2025-09-29 19:31:18 +0000 UTC" firstStartedPulling="2025-09-29 19:31:19.019464705 +0000 UTC m=+2091.012772101" lastFinishedPulling="2025-09-29 19:31:19.189452671 +0000 UTC m=+2091.182760067" observedRunningTime="2025-09-29 19:31:19.995313008 +0000 UTC m=+2091.988620414" watchObservedRunningTime="2025-09-29 19:31:20.003607965 +0000 UTC m=+2091.996915371" Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.606356 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7fmgn"] Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.608651 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7fmgn" Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.621584 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7fmgn"] Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.680664 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-catalog-content\") pod \"redhat-marketplace-7fmgn\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") " pod="openshift-marketplace/redhat-marketplace-7fmgn" Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.680911 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-utilities\") pod \"redhat-marketplace-7fmgn\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") " pod="openshift-marketplace/redhat-marketplace-7fmgn" Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.681313 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69tb5\" (UniqueName: \"kubernetes.io/projected/03c4acaa-4ef6-4bc0-b475-5a3d05285993-kube-api-access-69tb5\") pod \"redhat-marketplace-7fmgn\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") " pod="openshift-marketplace/redhat-marketplace-7fmgn" Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.783181 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-catalog-content\") pod \"redhat-marketplace-7fmgn\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") " pod="openshift-marketplace/redhat-marketplace-7fmgn" Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.783549 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-utilities\") pod \"redhat-marketplace-7fmgn\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") " pod="openshift-marketplace/redhat-marketplace-7fmgn" Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.783746 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69tb5\" (UniqueName: \"kubernetes.io/projected/03c4acaa-4ef6-4bc0-b475-5a3d05285993-kube-api-access-69tb5\") pod \"redhat-marketplace-7fmgn\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") " pod="openshift-marketplace/redhat-marketplace-7fmgn" Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.783902 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-catalog-content\") pod \"redhat-marketplace-7fmgn\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") " pod="openshift-marketplace/redhat-marketplace-7fmgn" Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.784147 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-utilities\") pod \"redhat-marketplace-7fmgn\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") " pod="openshift-marketplace/redhat-marketplace-7fmgn" Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.819629 4792 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-69tb5\" (UniqueName: \"kubernetes.io/projected/03c4acaa-4ef6-4bc0-b475-5a3d05285993-kube-api-access-69tb5\") pod \"redhat-marketplace-7fmgn\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") " pod="openshift-marketplace/redhat-marketplace-7fmgn"
Sep 29 19:31:28 crc kubenswrapper[4792]: I0929 19:31:28.930124 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7fmgn"
Sep 29 19:31:29 crc kubenswrapper[4792]: I0929 19:31:29.379220 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7fmgn"]
Sep 29 19:31:30 crc kubenswrapper[4792]: I0929 19:31:30.070676 4792 generic.go:334] "Generic (PLEG): container finished" podID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerID="241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc" exitCode=0
Sep 29 19:31:30 crc kubenswrapper[4792]: I0929 19:31:30.070779 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7fmgn" event={"ID":"03c4acaa-4ef6-4bc0-b475-5a3d05285993","Type":"ContainerDied","Data":"241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc"}
Sep 29 19:31:30 crc kubenswrapper[4792]: I0929 19:31:30.071043 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7fmgn" event={"ID":"03c4acaa-4ef6-4bc0-b475-5a3d05285993","Type":"ContainerStarted","Data":"bd91ebc207fa960fd24b14eeb6634cf3a525712576bbbf8af60fc5dbc451676e"}
Sep 29 19:31:31 crc kubenswrapper[4792]: I0929 19:31:31.083127 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7fmgn" event={"ID":"03c4acaa-4ef6-4bc0-b475-5a3d05285993","Type":"ContainerStarted","Data":"ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f"}
Sep 29 19:31:32 crc kubenswrapper[4792]: I0929 19:31:32.096726 4792 generic.go:334] "Generic (PLEG): container finished" podID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerID="ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f" exitCode=0
Sep 29 19:31:32 crc kubenswrapper[4792]: I0929 19:31:32.096957 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7fmgn" event={"ID":"03c4acaa-4ef6-4bc0-b475-5a3d05285993","Type":"ContainerDied","Data":"ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f"}
Sep 29 19:31:33 crc kubenswrapper[4792]: I0929 19:31:33.109632 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7fmgn" event={"ID":"03c4acaa-4ef6-4bc0-b475-5a3d05285993","Type":"ContainerStarted","Data":"0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d"}
Sep 29 19:31:33 crc kubenswrapper[4792]: I0929 19:31:33.140167 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7fmgn" podStartSLOduration=2.688469408 podStartE2EDuration="5.14014661s" podCreationTimestamp="2025-09-29 19:31:28 +0000 UTC" firstStartedPulling="2025-09-29 19:31:30.075156328 +0000 UTC m=+2102.068463754" lastFinishedPulling="2025-09-29 19:31:32.52683352 +0000 UTC m=+2104.520140956" observedRunningTime="2025-09-29 19:31:33.136734631 +0000 UTC m=+2105.130042057" watchObservedRunningTime="2025-09-29 19:31:33.14014661 +0000 UTC m=+2105.133454046"
Sep 29 19:31:38 crc kubenswrapper[4792]: I0929 19:31:38.931093 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7fmgn"
Sep 29 19:31:38 crc kubenswrapper[4792]: I0929 19:31:38.931791 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7fmgn"
Sep 29 19:31:38 crc kubenswrapper[4792]: I0929 19:31:38.998379 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7fmgn"
Sep 29 19:31:39 crc kubenswrapper[4792]: I0929 19:31:39.202993 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7fmgn"
Sep 29 19:31:39 crc kubenswrapper[4792]: I0929 19:31:39.253925 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7fmgn"]
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.176524 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7fmgn" podUID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerName="registry-server" containerID="cri-o://0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d" gracePeriod=2
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.662524 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7fmgn"
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.744483 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69tb5\" (UniqueName: \"kubernetes.io/projected/03c4acaa-4ef6-4bc0-b475-5a3d05285993-kube-api-access-69tb5\") pod \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") "
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.744707 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-catalog-content\") pod \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") "
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.744749 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-utilities\") pod \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\" (UID: \"03c4acaa-4ef6-4bc0-b475-5a3d05285993\") "
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.745716 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-utilities" (OuterVolumeSpecName: "utilities") pod "03c4acaa-4ef6-4bc0-b475-5a3d05285993" (UID: "03c4acaa-4ef6-4bc0-b475-5a3d05285993"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.752510 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03c4acaa-4ef6-4bc0-b475-5a3d05285993-kube-api-access-69tb5" (OuterVolumeSpecName: "kube-api-access-69tb5") pod "03c4acaa-4ef6-4bc0-b475-5a3d05285993" (UID: "03c4acaa-4ef6-4bc0-b475-5a3d05285993"). InnerVolumeSpecName "kube-api-access-69tb5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.761591 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03c4acaa-4ef6-4bc0-b475-5a3d05285993" (UID: "03c4acaa-4ef6-4bc0-b475-5a3d05285993"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.846588 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69tb5\" (UniqueName: \"kubernetes.io/projected/03c4acaa-4ef6-4bc0-b475-5a3d05285993-kube-api-access-69tb5\") on node \"crc\" DevicePath \"\""
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.846617 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:31:41 crc kubenswrapper[4792]: I0929 19:31:41.846627 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03c4acaa-4ef6-4bc0-b475-5a3d05285993-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.190282 4792 generic.go:334] "Generic (PLEG): container finished" podID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerID="0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d" exitCode=0
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.190321 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7fmgn" event={"ID":"03c4acaa-4ef6-4bc0-b475-5a3d05285993","Type":"ContainerDied","Data":"0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d"}
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.190380 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7fmgn" event={"ID":"03c4acaa-4ef6-4bc0-b475-5a3d05285993","Type":"ContainerDied","Data":"bd91ebc207fa960fd24b14eeb6634cf3a525712576bbbf8af60fc5dbc451676e"}
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.190388 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7fmgn"
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.190401 4792 scope.go:117] "RemoveContainer" containerID="0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d"
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.210485 4792 scope.go:117] "RemoveContainer" containerID="ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f"
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.244565 4792 scope.go:117] "RemoveContainer" containerID="241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc"
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.249492 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7fmgn"]
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.257599 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7fmgn"]
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.287297 4792 scope.go:117] "RemoveContainer" containerID="0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d"
Sep 29 19:31:42 crc kubenswrapper[4792]: E0929 19:31:42.287783 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d\": container with ID starting with 0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d not found: ID does not exist" containerID="0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d"
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.287815 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d"} err="failed to get container status \"0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d\": rpc error: code = NotFound desc = could not find container \"0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d\": container with ID starting with 0b872ad34ad652e3dc888296d26055a19ed16ed970da07d09e0c18382c782c1d not found: ID does not exist"
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.287835 4792 scope.go:117] "RemoveContainer" containerID="ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f"
Sep 29 19:31:42 crc kubenswrapper[4792]: E0929 19:31:42.288256 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f\": container with ID starting with ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f not found: ID does not exist" containerID="ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f"
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.288282 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f"} err="failed to get container status \"ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f\": rpc error: code = NotFound desc = could not find container \"ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f\": container with ID starting with ab7e780b8693c5715d9a5644d0ee845d4e53431c0dfed6c4d4ab7473ac43048f not found: ID does not exist"
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.288295 4792 scope.go:117] "RemoveContainer" containerID="241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc"
Sep 29 19:31:42 crc kubenswrapper[4792]: E0929 19:31:42.288693 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc\": container with ID starting with 241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc not found: ID does not exist" containerID="241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc"
Sep 29 19:31:42 crc kubenswrapper[4792]: I0929 19:31:42.288772 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc"} err="failed to get container status \"241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc\": rpc error: code = NotFound desc = could not find container \"241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc\": container with ID starting with 241978d14b1b1e295f9cda35b208f63470fce4e145f76d6bea05948acc6c0bbc not found: ID does not exist"
Sep 29 19:31:43 crc kubenswrapper[4792]: I0929 19:31:43.031993 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" path="/var/lib/kubelet/pods/03c4acaa-4ef6-4bc0-b475-5a3d05285993/volumes"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.074178 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ffc25"]
Sep 29 19:31:49 crc kubenswrapper[4792]: E0929 19:31:49.075166 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerName="extract-content"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.075181 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerName="extract-content"
Sep 29 19:31:49 crc kubenswrapper[4792]: E0929 19:31:49.075240 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerName="extract-utilities"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.075249 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerName="extract-utilities"
Sep 29 19:31:49 crc kubenswrapper[4792]: E0929 19:31:49.075265 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerName="registry-server"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.075274 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerName="registry-server"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.075530 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c4acaa-4ef6-4bc0-b475-5a3d05285993" containerName="registry-server"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.077295 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.089475 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ffc25"]
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.100121 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-catalog-content\") pod \"certified-operators-ffc25\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") " pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.100208 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-utilities\") pod \"certified-operators-ffc25\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") " pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.100309 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j55fd\" (UniqueName: \"kubernetes.io/projected/d569fd41-d679-4b6a-bf0c-519d5e157955-kube-api-access-j55fd\") pod \"certified-operators-ffc25\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") " pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.201409 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-utilities\") pod \"certified-operators-ffc25\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") " pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.201748 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j55fd\" (UniqueName: \"kubernetes.io/projected/d569fd41-d679-4b6a-bf0c-519d5e157955-kube-api-access-j55fd\") pod \"certified-operators-ffc25\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") " pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.201823 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-catalog-content\") pod \"certified-operators-ffc25\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") " pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.202053 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-utilities\") pod \"certified-operators-ffc25\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") " pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.202190 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-catalog-content\") pod \"certified-operators-ffc25\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") " pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.228864 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j55fd\" (UniqueName: \"kubernetes.io/projected/d569fd41-d679-4b6a-bf0c-519d5e157955-kube-api-access-j55fd\") pod \"certified-operators-ffc25\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") " pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.400677 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:49 crc kubenswrapper[4792]: I0929 19:31:49.794635 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ffc25"]
Sep 29 19:31:50 crc kubenswrapper[4792]: I0929 19:31:50.259444 4792 generic.go:334] "Generic (PLEG): container finished" podID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerID="07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373" exitCode=0
Sep 29 19:31:50 crc kubenswrapper[4792]: I0929 19:31:50.259489 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffc25" event={"ID":"d569fd41-d679-4b6a-bf0c-519d5e157955","Type":"ContainerDied","Data":"07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373"}
Sep 29 19:31:50 crc kubenswrapper[4792]: I0929 19:31:50.259647 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffc25" event={"ID":"d569fd41-d679-4b6a-bf0c-519d5e157955","Type":"ContainerStarted","Data":"b88ba67c412a8dc2ba179dbbda08a9aee4d7052ebf98c3291cc3fbb0044aa716"}
Sep 29 19:31:51 crc kubenswrapper[4792]: I0929 19:31:51.270238 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffc25" event={"ID":"d569fd41-d679-4b6a-bf0c-519d5e157955","Type":"ContainerStarted","Data":"8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e"}
Sep 29 19:31:53 crc kubenswrapper[4792]: I0929 19:31:53.290134 4792 generic.go:334] "Generic (PLEG): container finished" podID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerID="8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e" exitCode=0
Sep 29 19:31:53 crc kubenswrapper[4792]: I0929 19:31:53.290208 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffc25" event={"ID":"d569fd41-d679-4b6a-bf0c-519d5e157955","Type":"ContainerDied","Data":"8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e"}
Sep 29 19:31:54 crc kubenswrapper[4792]: I0929 19:31:54.303634 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffc25" event={"ID":"d569fd41-d679-4b6a-bf0c-519d5e157955","Type":"ContainerStarted","Data":"94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d"}
Sep 29 19:31:54 crc kubenswrapper[4792]: I0929 19:31:54.326588 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ffc25" podStartSLOduration=1.821414093 podStartE2EDuration="5.326565857s" podCreationTimestamp="2025-09-29 19:31:49 +0000 UTC" firstStartedPulling="2025-09-29 19:31:50.261013567 +0000 UTC m=+2122.254320953" lastFinishedPulling="2025-09-29 19:31:53.766165281 +0000 UTC m=+2125.759472717" observedRunningTime="2025-09-29 19:31:54.320107738 +0000 UTC m=+2126.313415144" watchObservedRunningTime="2025-09-29 19:31:54.326565857 +0000 UTC m=+2126.319873253"
Sep 29 19:31:59 crc kubenswrapper[4792]: I0929 19:31:59.401055 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:59 crc kubenswrapper[4792]: I0929 19:31:59.402183 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:31:59 crc kubenswrapper[4792]: I0929 19:31:59.462718 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:32:00 crc kubenswrapper[4792]: I0929 19:32:00.409255 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:32:00 crc kubenswrapper[4792]: I0929 19:32:00.475559 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ffc25"]
Sep 29 19:32:02 crc kubenswrapper[4792]: I0929 19:32:02.373532 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ffc25" podUID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerName="registry-server" containerID="cri-o://94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d" gracePeriod=2
Sep 29 19:32:02 crc kubenswrapper[4792]: I0929 19:32:02.907007 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.076311 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-catalog-content\") pod \"d569fd41-d679-4b6a-bf0c-519d5e157955\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") "
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.076457 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-utilities\") pod \"d569fd41-d679-4b6a-bf0c-519d5e157955\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") "
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.076494 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j55fd\" (UniqueName: \"kubernetes.io/projected/d569fd41-d679-4b6a-bf0c-519d5e157955-kube-api-access-j55fd\") pod \"d569fd41-d679-4b6a-bf0c-519d5e157955\" (UID: \"d569fd41-d679-4b6a-bf0c-519d5e157955\") "
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.077169 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-utilities" (OuterVolumeSpecName: "utilities") pod "d569fd41-d679-4b6a-bf0c-519d5e157955" (UID: "d569fd41-d679-4b6a-bf0c-519d5e157955"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.085556 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d569fd41-d679-4b6a-bf0c-519d5e157955-kube-api-access-j55fd" (OuterVolumeSpecName: "kube-api-access-j55fd") pod "d569fd41-d679-4b6a-bf0c-519d5e157955" (UID: "d569fd41-d679-4b6a-bf0c-519d5e157955"). InnerVolumeSpecName "kube-api-access-j55fd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.136099 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d569fd41-d679-4b6a-bf0c-519d5e157955" (UID: "d569fd41-d679-4b6a-bf0c-519d5e157955"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.179343 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.179378 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d569fd41-d679-4b6a-bf0c-519d5e157955-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.179391 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j55fd\" (UniqueName: \"kubernetes.io/projected/d569fd41-d679-4b6a-bf0c-519d5e157955-kube-api-access-j55fd\") on node \"crc\" DevicePath \"\""
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.390777 4792 generic.go:334] "Generic (PLEG): container finished" podID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerID="94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d" exitCode=0
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.390838 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffc25" event={"ID":"d569fd41-d679-4b6a-bf0c-519d5e157955","Type":"ContainerDied","Data":"94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d"}
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.390979 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffc25" event={"ID":"d569fd41-d679-4b6a-bf0c-519d5e157955","Type":"ContainerDied","Data":"b88ba67c412a8dc2ba179dbbda08a9aee4d7052ebf98c3291cc3fbb0044aa716"}
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.391027 4792 scope.go:117] "RemoveContainer" containerID="94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d"
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.391915 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ffc25"
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.430231 4792 scope.go:117] "RemoveContainer" containerID="8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e"
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.434078 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ffc25"]
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.442930 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ffc25"]
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.466026 4792 scope.go:117] "RemoveContainer" containerID="07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373"
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.508995 4792 scope.go:117] "RemoveContainer" containerID="94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d"
Sep 29 19:32:03 crc kubenswrapper[4792]: E0929 19:32:03.509514 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d\": container with ID starting with 94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d not found: ID does not exist" containerID="94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d"
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.509552 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d"} err="failed to get container status \"94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d\": rpc error: code = NotFound desc = could not find container \"94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d\": container with ID starting with 94df22f26a23765b586ecddf739d6f1aacea23c22eb2fd71e56e12419a95850d not found: ID does not exist"
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.509582 4792 scope.go:117] "RemoveContainer" containerID="8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e"
Sep 29 19:32:03 crc kubenswrapper[4792]: E0929 19:32:03.510058 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e\": container with ID starting with 8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e not found: ID does not exist" containerID="8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e"
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.510134 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e"} err="failed to get container status \"8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e\": rpc error: code = NotFound desc = could not find container \"8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e\": container with ID starting with 8265d955f630e95311f1ec768b650e5d7aa9738e52f90db3759ceb5058a0288e not found: ID does not exist"
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.510171 4792 scope.go:117] "RemoveContainer" containerID="07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373"
Sep 29 19:32:03 crc kubenswrapper[4792]: E0929 19:32:03.510438 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373\": container with ID starting with 07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373 not found: ID does not exist" containerID="07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373"
Sep 29 19:32:03 crc kubenswrapper[4792]: I0929 19:32:03.510476 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373"} err="failed to get container status \"07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373\": rpc error: code = NotFound desc = could not find container \"07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373\": container with ID starting with 07de77292c620a9c523f57373d70b9d704e37cc37f2bdc3725d4b0a1ab263373 not found: ID does not exist"
Sep 29 19:32:05 crc kubenswrapper[4792]: I0929 19:32:05.029107 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d569fd41-d679-4b6a-bf0c-519d5e157955" path="/var/lib/kubelet/pods/d569fd41-d679-4b6a-bf0c-519d5e157955/volumes"
Sep 29 19:32:11 crc kubenswrapper[4792]: I0929 19:32:11.960255 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:32:11 crc kubenswrapper[4792]: I0929 19:32:11.960497 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:32:34 crc kubenswrapper[4792]: I0929 19:32:34.676975 4792 generic.go:334] "Generic (PLEG): container finished" podID="c694fcd6-bd39-4ec9-9b52-536c53bfff92" containerID="68c7e9c6d9fe92a597022c714c0af86e3afc59d2ebded93d333f5da73fbbccb9" exitCode=0
Sep 29 19:32:34 crc kubenswrapper[4792]: I0929 19:32:34.677082 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" event={"ID":"c694fcd6-bd39-4ec9-9b52-536c53bfff92","Type":"ContainerDied","Data":"68c7e9c6d9fe92a597022c714c0af86e3afc59d2ebded93d333f5da73fbbccb9"}
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.112156 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.209704 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ssh-key\") pod \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") "
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.209760 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovncontroller-config-0\") pod \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") "
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.209831 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbgdx\" (UniqueName: \"kubernetes.io/projected/c694fcd6-bd39-4ec9-9b52-536c53bfff92-kube-api-access-bbgdx\") pod \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") "
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.209981 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovn-combined-ca-bundle\") pod \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") "
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.210012 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-inventory\") pod \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\" (UID: \"c694fcd6-bd39-4ec9-9b52-536c53bfff92\") "
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.224760 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c694fcd6-bd39-4ec9-9b52-536c53bfff92" (UID: "c694fcd6-bd39-4ec9-9b52-536c53bfff92"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.232512 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c694fcd6-bd39-4ec9-9b52-536c53bfff92-kube-api-access-bbgdx" (OuterVolumeSpecName: "kube-api-access-bbgdx") pod "c694fcd6-bd39-4ec9-9b52-536c53bfff92" (UID: "c694fcd6-bd39-4ec9-9b52-536c53bfff92"). InnerVolumeSpecName "kube-api-access-bbgdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.242043 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "c694fcd6-bd39-4ec9-9b52-536c53bfff92" (UID: "c694fcd6-bd39-4ec9-9b52-536c53bfff92"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.242997 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-inventory" (OuterVolumeSpecName: "inventory") pod "c694fcd6-bd39-4ec9-9b52-536c53bfff92" (UID: "c694fcd6-bd39-4ec9-9b52-536c53bfff92"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.253466 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c694fcd6-bd39-4ec9-9b52-536c53bfff92" (UID: "c694fcd6-bd39-4ec9-9b52-536c53bfff92"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.312175 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.312209 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.312219 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.312227 4792 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c694fcd6-bd39-4ec9-9b52-536c53bfff92-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.312237 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbgdx\" (UniqueName: \"kubernetes.io/projected/c694fcd6-bd39-4ec9-9b52-536c53bfff92-kube-api-access-bbgdx\") on node \"crc\" DevicePath \"\""
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.698863 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g" event={"ID":"c694fcd6-bd39-4ec9-9b52-536c53bfff92","Type":"ContainerDied","Data":"4075dd8c85969985721bbee05dd8dc98135d672160d64d2cda2a2169e653abbf"}
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.698933 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4075dd8c85969985721bbee05dd8dc98135d672160d64d2cda2a2169e653abbf"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.698975 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vx68g"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.851605 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"]
Sep 29 19:32:36 crc kubenswrapper[4792]: E0929 19:32:36.852032 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerName="extract-content"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.852054 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerName="extract-content"
Sep 29 19:32:36 crc kubenswrapper[4792]: E0929 19:32:36.852090 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerName="extract-utilities"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.852101 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerName="extract-utilities"
Sep 29 19:32:36 crc kubenswrapper[4792]: E0929 19:32:36.852122 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c694fcd6-bd39-4ec9-9b52-536c53bfff92" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.852131 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c694fcd6-bd39-4ec9-9b52-536c53bfff92" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:32:36 crc kubenswrapper[4792]: E0929 19:32:36.852150 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerName="registry-server"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.852157 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerName="registry-server"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.852367 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c694fcd6-bd39-4ec9-9b52-536c53bfff92" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.852393 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="d569fd41-d679-4b6a-bf0c-519d5e157955" containerName="registry-server"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.852986 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.855958 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.856195 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.858095 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.858251 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.858149 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.858199 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.868746 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"]
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.929255 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.929620 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.929669 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.929733 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.929990 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:36 crc kubenswrapper[4792]: I0929 19:32:36.930185 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8hmc\" (UniqueName: \"kubernetes.io/projected/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-kube-api-access-b8hmc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.034155 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.034456 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.034566 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.034668 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.034774 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.034927 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8hmc\" (UniqueName: \"kubernetes.io/projected/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-kube-api-access-b8hmc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.043455 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.043515 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.044463 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.050217 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8hmc\" (UniqueName: \"kubernetes.io/projected/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-kube-api-access-b8hmc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.052644 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.055691 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.217612 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:32:37 crc kubenswrapper[4792]: I0929 19:32:37.816881 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"]
Sep 29 19:32:38 crc kubenswrapper[4792]: I0929 19:32:38.724246 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf" event={"ID":"e87aba80-3b0a-409a-8b12-3a8b7c1290d8","Type":"ContainerStarted","Data":"ea8994655acfabcfd97f8922d4c95f82df4c8fb75a8c124f1cc50151e8da8b60"}
Sep 29 19:32:38 crc kubenswrapper[4792]: I0929 19:32:38.724668 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf" event={"ID":"e87aba80-3b0a-409a-8b12-3a8b7c1290d8","Type":"ContainerStarted","Data":"70e465fd673560bcfc67f864b5932adae08613b0c113527c967698f652a03f92"}
Sep 29 19:32:38 crc kubenswrapper[4792]: I0929 19:32:38.745505 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf" podStartSLOduration=2.569605678 podStartE2EDuration="2.745487041s" podCreationTimestamp="2025-09-29 19:32:36 +0000 UTC" firstStartedPulling="2025-09-29 19:32:37.838885061 +0000 UTC m=+2169.832192457" lastFinishedPulling="2025-09-29 19:32:38.014766424 +0000 UTC m=+2170.008073820" observedRunningTime="2025-09-29 19:32:38.743210131 +0000 UTC m=+2170.736517577" watchObservedRunningTime="2025-09-29 19:32:38.745487041 +0000 UTC m=+2170.738794437"
Sep 29 19:32:41 crc kubenswrapper[4792]: I0929 19:32:41.959810 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:32:41 crc kubenswrapper[4792]: I0929 19:32:41.960648 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:33:11 crc kubenswrapper[4792]: I0929 19:33:11.959412 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:33:11 crc kubenswrapper[4792]: I0929 19:33:11.960195 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:33:11 crc kubenswrapper[4792]: I0929 19:33:11.960264 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59"
Sep 29 19:33:11 crc kubenswrapper[4792]: I0929 19:33:11.961669 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b984d9990ebb9d14104b77ed41e9db98bc311c1e21b0dba6547f2b2dea1a040c"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 19:33:11 crc kubenswrapper[4792]: I0929 19:33:11.961768 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://b984d9990ebb9d14104b77ed41e9db98bc311c1e21b0dba6547f2b2dea1a040c" gracePeriod=600
Sep 29 19:33:13 crc kubenswrapper[4792]: I0929 19:33:13.047887 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"b984d9990ebb9d14104b77ed41e9db98bc311c1e21b0dba6547f2b2dea1a040c"}
Sep 29 19:33:13 crc kubenswrapper[4792]: I0929 19:33:13.047919 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="b984d9990ebb9d14104b77ed41e9db98bc311c1e21b0dba6547f2b2dea1a040c" exitCode=0
Sep 29 19:33:13 crc kubenswrapper[4792]: I0929 19:33:13.048515 4792 scope.go:117] "RemoveContainer" containerID="ccfabdbbd2fc28db3a7759f30ddf4fbe532580d663aea81dbf9d9f716c69f3f7"
Sep 29 19:33:13 crc kubenswrapper[4792]: I0929 19:33:13.048528 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7"}
Sep 29 19:33:35 crc kubenswrapper[4792]: I0929 19:33:35.245160 4792 generic.go:334] "Generic (PLEG): container finished" podID="e87aba80-3b0a-409a-8b12-3a8b7c1290d8" containerID="ea8994655acfabcfd97f8922d4c95f82df4c8fb75a8c124f1cc50151e8da8b60" exitCode=0
Sep 29 19:33:35 crc kubenswrapper[4792]: I0929 19:33:35.245721 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf" event={"ID":"e87aba80-3b0a-409a-8b12-3a8b7c1290d8","Type":"ContainerDied","Data":"ea8994655acfabcfd97f8922d4c95f82df4c8fb75a8c124f1cc50151e8da8b60"}
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.691327 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.821412 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-metadata-combined-ca-bundle\") pod \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") "
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.821501 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-ssh-key\") pod \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") "
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.821555 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") "
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.821606 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8hmc\" (UniqueName: \"kubernetes.io/projected/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-kube-api-access-b8hmc\") pod \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") "
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.821662 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-nova-metadata-neutron-config-0\") pod \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") "
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.821800 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-inventory\") pod \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\" (UID: \"e87aba80-3b0a-409a-8b12-3a8b7c1290d8\") "
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.829297 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-kube-api-access-b8hmc" (OuterVolumeSpecName: "kube-api-access-b8hmc") pod "e87aba80-3b0a-409a-8b12-3a8b7c1290d8" (UID: "e87aba80-3b0a-409a-8b12-3a8b7c1290d8"). InnerVolumeSpecName "kube-api-access-b8hmc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.836630 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "e87aba80-3b0a-409a-8b12-3a8b7c1290d8" (UID: "e87aba80-3b0a-409a-8b12-3a8b7c1290d8"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.855428 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "e87aba80-3b0a-409a-8b12-3a8b7c1290d8" (UID: "e87aba80-3b0a-409a-8b12-3a8b7c1290d8"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.855489 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e87aba80-3b0a-409a-8b12-3a8b7c1290d8" (UID: "e87aba80-3b0a-409a-8b12-3a8b7c1290d8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.856738 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-inventory" (OuterVolumeSpecName: "inventory") pod "e87aba80-3b0a-409a-8b12-3a8b7c1290d8" (UID: "e87aba80-3b0a-409a-8b12-3a8b7c1290d8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.864774 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "e87aba80-3b0a-409a-8b12-3a8b7c1290d8" (UID: "e87aba80-3b0a-409a-8b12-3a8b7c1290d8"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.924542 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.924576 4792 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.924587 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.924596 4792 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.924608 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8hmc\" (UniqueName: \"kubernetes.io/projected/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-kube-api-access-b8hmc\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:36 crc kubenswrapper[4792]: I0929 19:33:36.924618 4792 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e87aba80-3b0a-409a-8b12-3a8b7c1290d8-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.262647 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf" event={"ID":"e87aba80-3b0a-409a-8b12-3a8b7c1290d8","Type":"ContainerDied","Data":"70e465fd673560bcfc67f864b5932adae08613b0c113527c967698f652a03f92"}
Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.262914 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70e465fd673560bcfc67f864b5932adae08613b0c113527c967698f652a03f92"
Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.262918 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf"
Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.362558 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc"]
Sep 29 19:33:37 crc kubenswrapper[4792]: E0929 19:33:37.362919 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87aba80-3b0a-409a-8b12-3a8b7c1290d8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.362936 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87aba80-3b0a-409a-8b12-3a8b7c1290d8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.363148 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e87aba80-3b0a-409a-8b12-3a8b7c1290d8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.363733 4792 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.366403 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.367007 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.367195 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.367343 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.370997 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.384696 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc"] Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.446639 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxcmd\" (UniqueName: \"kubernetes.io/projected/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-kube-api-access-vxcmd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.446953 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.447148 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.447232 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.447278 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.549429 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.549497 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.549534 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.549613 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxcmd\" (UniqueName: \"kubernetes.io/projected/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-kube-api-access-vxcmd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.549709 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.555056 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.555514 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.556155 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.556959 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-secret-0\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.567096 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxcmd\" (UniqueName: \"kubernetes.io/projected/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-kube-api-access-vxcmd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-s77fc\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:37 crc kubenswrapper[4792]: I0929 19:33:37.681234 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:33:38 crc kubenswrapper[4792]: I0929 19:33:38.223642 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc"] Sep 29 19:33:38 crc kubenswrapper[4792]: I0929 19:33:38.275592 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" event={"ID":"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7","Type":"ContainerStarted","Data":"154f04412d1090b6e6682591d21246b9d7bb03b7624283b281ae8a0aee92fe59"} Sep 29 19:33:39 crc kubenswrapper[4792]: I0929 19:33:39.292078 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" event={"ID":"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7","Type":"ContainerStarted","Data":"694f0bb10b6c1363e0e4ff95852321db54247f429f04df8880b921a75ce00b52"} Sep 29 19:33:39 crc kubenswrapper[4792]: I0929 19:33:39.321870 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" podStartSLOduration=2.169973879 podStartE2EDuration="2.32182818s" podCreationTimestamp="2025-09-29 19:33:37 +0000 UTC" firstStartedPulling="2025-09-29 19:33:38.231750128 +0000 UTC m=+2230.225057544" lastFinishedPulling="2025-09-29 19:33:38.383604449 +0000 UTC m=+2230.376911845" observedRunningTime="2025-09-29 19:33:39.315618607 +0000 UTC m=+2231.308926063" watchObservedRunningTime="2025-09-29 19:33:39.32182818 +0000 UTC m=+2231.315135586" Sep 29 19:35:41 crc kubenswrapper[4792]: I0929 19:35:41.960163 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:35:41 crc kubenswrapper[4792]: I0929 19:35:41.960653 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:36:11 crc kubenswrapper[4792]: I0929 19:36:11.960378 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:36:11 crc kubenswrapper[4792]: I0929 19:36:11.961176 4792 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:36:41 crc kubenswrapper[4792]: I0929 19:36:41.959854 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:36:41 crc kubenswrapper[4792]: I0929 19:36:41.960568 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:36:41 crc kubenswrapper[4792]: I0929 19:36:41.960628 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:36:41 crc kubenswrapper[4792]: I0929 19:36:41.962094 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:36:41 crc kubenswrapper[4792]: I0929 19:36:41.962190 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" gracePeriod=600 Sep 29 19:36:42 crc kubenswrapper[4792]: E0929 19:36:42.089197 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:36:42 crc kubenswrapper[4792]: I0929 19:36:42.966397 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" exitCode=0 Sep 29 19:36:42 crc kubenswrapper[4792]: I0929 19:36:42.966459 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7"} Sep 29 19:36:42 crc kubenswrapper[4792]: I0929 19:36:42.966727 4792 scope.go:117] "RemoveContainer" containerID="b984d9990ebb9d14104b77ed41e9db98bc311c1e21b0dba6547f2b2dea1a040c" Sep 29 19:36:42 crc kubenswrapper[4792]: I0929 19:36:42.967664 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:36:42 crc kubenswrapper[4792]: E0929 
19:36:42.968317 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:36:56 crc kubenswrapper[4792]: I0929 19:36:56.016371 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:36:56 crc kubenswrapper[4792]: E0929 19:36:56.019474 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:37:07 crc kubenswrapper[4792]: I0929 19:37:07.015886 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:37:07 crc kubenswrapper[4792]: E0929 19:37:07.016597 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:37:18 crc kubenswrapper[4792]: I0929 19:37:18.965496 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q8fmr"] Sep 29 19:37:18 crc kubenswrapper[4792]: I0929 19:37:18.975217 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:18 crc kubenswrapper[4792]: I0929 19:37:18.986431 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q8fmr"] Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.027839 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:37:19 crc kubenswrapper[4792]: E0929 19:37:19.028341 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.049562 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-utilities\") pod \"redhat-operators-q8fmr\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.049646 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-catalog-content\") pod \"redhat-operators-q8fmr\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.049741 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np6cq\" (UniqueName: \"kubernetes.io/projected/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-kube-api-access-np6cq\") pod \"redhat-operators-q8fmr\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.151113 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np6cq\" (UniqueName: \"kubernetes.io/projected/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-kube-api-access-np6cq\") pod \"redhat-operators-q8fmr\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.151185 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-utilities\") pod \"redhat-operators-q8fmr\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.151249 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-catalog-content\") pod \"redhat-operators-q8fmr\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.151668 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-catalog-content\") pod \"redhat-operators-q8fmr\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.151723 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-utilities\") pod \"redhat-operators-q8fmr\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.173245 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np6cq\" (UniqueName: \"kubernetes.io/projected/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-kube-api-access-np6cq\") pod \"redhat-operators-q8fmr\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.312724 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:19 crc kubenswrapper[4792]: I0929 19:37:19.785700 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q8fmr"] Sep 29 19:37:20 crc kubenswrapper[4792]: I0929 19:37:20.307483 4792 generic.go:334] "Generic (PLEG): container finished" podID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerID="e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde" exitCode=0 Sep 29 19:37:20 crc kubenswrapper[4792]: I0929 19:37:20.308809 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8fmr" event={"ID":"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6","Type":"ContainerDied","Data":"e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde"} Sep 29 19:37:20 crc kubenswrapper[4792]: I0929 19:37:20.309164 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:37:20 crc kubenswrapper[4792]: I0929 19:37:20.316295 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8fmr" event={"ID":"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6","Type":"ContainerStarted","Data":"f91232bdc3d527db67d38d80d875668238206e9fd9fcc381b5ff4fc451027d43"} Sep 29 19:37:21 crc kubenswrapper[4792]: I0929 19:37:21.324094 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8fmr" event={"ID":"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6","Type":"ContainerStarted","Data":"d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a"} Sep 29 19:37:25 crc kubenswrapper[4792]: I0929 19:37:25.362457 4792 generic.go:334] "Generic (PLEG): container finished" podID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerID="d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a" exitCode=0 Sep 29 19:37:25 crc kubenswrapper[4792]: I0929 19:37:25.362538 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8fmr" event={"ID":"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6","Type":"ContainerDied","Data":"d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a"} Sep 29 19:37:26 crc kubenswrapper[4792]: I0929 19:37:26.374118 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8fmr" 
event={"ID":"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6","Type":"ContainerStarted","Data":"2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1"} Sep 29 19:37:26 crc kubenswrapper[4792]: I0929 19:37:26.395891 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q8fmr" podStartSLOduration=2.892909075 podStartE2EDuration="8.395838666s" podCreationTimestamp="2025-09-29 19:37:18 +0000 UTC" firstStartedPulling="2025-09-29 19:37:20.308892054 +0000 UTC m=+2452.302199450" lastFinishedPulling="2025-09-29 19:37:25.811821635 +0000 UTC m=+2457.805129041" observedRunningTime="2025-09-29 19:37:26.391151533 +0000 UTC m=+2458.384458939" watchObservedRunningTime="2025-09-29 19:37:26.395838666 +0000 UTC m=+2458.389146092" Sep 29 19:37:29 crc kubenswrapper[4792]: I0929 19:37:29.313908 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:29 crc kubenswrapper[4792]: I0929 19:37:29.314750 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:30 crc kubenswrapper[4792]: I0929 19:37:30.015729 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:37:30 crc kubenswrapper[4792]: E0929 19:37:30.016030 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:37:30 crc kubenswrapper[4792]: I0929 19:37:30.367251 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-q8fmr" podUID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerName="registry-server" probeResult="failure" output=< Sep 29 19:37:30 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Sep 29 19:37:30 crc kubenswrapper[4792]: > Sep 29 19:37:39 crc kubenswrapper[4792]: I0929 19:37:39.357954 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:39 crc kubenswrapper[4792]: I0929 19:37:39.419487 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:39 crc kubenswrapper[4792]: I0929 19:37:39.594487 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q8fmr"] Sep 29 19:37:40 crc kubenswrapper[4792]: I0929 19:37:40.492221 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-q8fmr" podUID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerName="registry-server" containerID="cri-o://2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1" gracePeriod=2 Sep 29 19:37:40 crc kubenswrapper[4792]: I0929 19:37:40.959365 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.115507 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-catalog-content\") pod \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.115699 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-utilities\") pod \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.115751 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np6cq\" (UniqueName: \"kubernetes.io/projected/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-kube-api-access-np6cq\") pod \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\" (UID: \"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6\") " Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.116914 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-utilities" (OuterVolumeSpecName: "utilities") pod "c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" (UID: "c3a91ff8-561c-48c6-ad49-205ea9f5a4d6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.124394 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-kube-api-access-np6cq" (OuterVolumeSpecName: "kube-api-access-np6cq") pod "c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" (UID: "c3a91ff8-561c-48c6-ad49-205ea9f5a4d6"). InnerVolumeSpecName "kube-api-access-np6cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.201298 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" (UID: "c3a91ff8-561c-48c6-ad49-205ea9f5a4d6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.219023 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.219058 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.219067 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np6cq\" (UniqueName: \"kubernetes.io/projected/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6-kube-api-access-np6cq\") on node \"crc\" DevicePath \"\"" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.503718 4792 generic.go:334] "Generic (PLEG): container finished" podID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerID="2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1" exitCode=0 Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.503787 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q8fmr" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.503834 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8fmr" event={"ID":"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6","Type":"ContainerDied","Data":"2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1"} Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.504203 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8fmr" event={"ID":"c3a91ff8-561c-48c6-ad49-205ea9f5a4d6","Type":"ContainerDied","Data":"f91232bdc3d527db67d38d80d875668238206e9fd9fcc381b5ff4fc451027d43"} Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.504247 4792 scope.go:117] "RemoveContainer" containerID="2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.524506 4792 scope.go:117] "RemoveContainer" containerID="d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.544707 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q8fmr"] Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.553892 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-q8fmr"] Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.562269 4792 scope.go:117] "RemoveContainer" containerID="e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.624185 4792 scope.go:117] "RemoveContainer" containerID="2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1" Sep 29 19:37:41 crc kubenswrapper[4792]: E0929 19:37:41.624692 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1\": container with ID starting with 2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1 not found: ID does not exist" containerID="2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.624770 4792 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1"} err="failed to get container status \"2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1\": rpc error: code = NotFound desc = could not find container \"2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1\": container with ID starting with 2907aa595e5a705b49fa92366f390dc408317001671ade6877a7f1838cf8acb1 not found: ID does not exist" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.624796 4792 scope.go:117] "RemoveContainer" containerID="d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a" Sep 29 19:37:41 crc kubenswrapper[4792]: E0929 19:37:41.625164 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a\": container with ID starting with d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a not found: ID does not exist" containerID="d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.625194 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a"} err="failed to get container status \"d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a\": rpc error: code = NotFound desc = could not find container \"d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a\": container with ID starting with d2afa9a43676008edf6ea6e6442e573797223b5c91c5bcbcd10ca6c3be81797a not found: ID does not exist" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.625214 4792 scope.go:117] "RemoveContainer" containerID="e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde" Sep 29 19:37:41 crc kubenswrapper[4792]: E0929 19:37:41.625552 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde\": container with ID starting with e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde not found: ID does not exist" containerID="e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde" Sep 29 19:37:41 crc kubenswrapper[4792]: I0929 19:37:41.625602 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde"} err="failed to get container status \"e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde\": rpc error: code = NotFound desc = could not find container \"e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde\": container with ID starting with e9c63c2d4f74589d71bdf7c51509266c9f709985f4f43f5da1bfa14686041cde not found: ID does not exist" Sep 29 19:37:43 crc kubenswrapper[4792]: I0929 19:37:43.032905 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" path="/var/lib/kubelet/pods/c3a91ff8-561c-48c6-ad49-205ea9f5a4d6/volumes" Sep 29 19:37:45 crc kubenswrapper[4792]: I0929 19:37:45.015716 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:37:45 crc kubenswrapper[4792]: E0929 19:37:45.016632 4792 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:38:00 crc kubenswrapper[4792]: I0929 19:38:00.015571 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:38:00 crc kubenswrapper[4792]: E0929 19:38:00.016385 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:38:12 crc kubenswrapper[4792]: I0929 19:38:12.015031 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:38:12 crc kubenswrapper[4792]: E0929 19:38:12.015748 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:38:27 crc kubenswrapper[4792]: I0929 19:38:27.015968 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:38:27 crc kubenswrapper[4792]: E0929 19:38:27.017549 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:38:29 crc kubenswrapper[4792]: I0929 19:38:29.936236 4792 generic.go:334] "Generic (PLEG): container finished" podID="1a5948bb-2b33-40f6-9a12-1b8b4e3071a7" containerID="694f0bb10b6c1363e0e4ff95852321db54247f429f04df8880b921a75ce00b52" exitCode=0 Sep 29 19:38:29 crc kubenswrapper[4792]: I0929 19:38:29.936368 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" event={"ID":"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7","Type":"ContainerDied","Data":"694f0bb10b6c1363e0e4ff95852321db54247f429f04df8880b921a75ce00b52"} Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.297882 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.417228 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-combined-ca-bundle\") pod \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.417276 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-secret-0\") pod \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.417336 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxcmd\" (UniqueName: \"kubernetes.io/projected/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-kube-api-access-vxcmd\") pod \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.417456 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-inventory\") pod \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.417496 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-ssh-key\") pod \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\" (UID: \"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7\") " Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.422694 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-kube-api-access-vxcmd" (OuterVolumeSpecName: "kube-api-access-vxcmd") pod "1a5948bb-2b33-40f6-9a12-1b8b4e3071a7" (UID: "1a5948bb-2b33-40f6-9a12-1b8b4e3071a7"). InnerVolumeSpecName "kube-api-access-vxcmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.425060 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "1a5948bb-2b33-40f6-9a12-1b8b4e3071a7" (UID: "1a5948bb-2b33-40f6-9a12-1b8b4e3071a7"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.443484 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-inventory" (OuterVolumeSpecName: "inventory") pod "1a5948bb-2b33-40f6-9a12-1b8b4e3071a7" (UID: "1a5948bb-2b33-40f6-9a12-1b8b4e3071a7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.445517 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "1a5948bb-2b33-40f6-9a12-1b8b4e3071a7" (UID: "1a5948bb-2b33-40f6-9a12-1b8b4e3071a7"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.451075 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1a5948bb-2b33-40f6-9a12-1b8b4e3071a7" (UID: "1a5948bb-2b33-40f6-9a12-1b8b4e3071a7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.519253 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.519277 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.519287 4792 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.519297 4792 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.519305 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxcmd\" (UniqueName: \"kubernetes.io/projected/1a5948bb-2b33-40f6-9a12-1b8b4e3071a7-kube-api-access-vxcmd\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.954123 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" event={"ID":"1a5948bb-2b33-40f6-9a12-1b8b4e3071a7","Type":"ContainerDied","Data":"154f04412d1090b6e6682591d21246b9d7bb03b7624283b281ae8a0aee92fe59"} Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.954151 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-s77fc" Sep 29 19:38:31 crc kubenswrapper[4792]: I0929 19:38:31.954170 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="154f04412d1090b6e6682591d21246b9d7bb03b7624283b281ae8a0aee92fe59" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.161041 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf"] Sep 29 19:38:32 crc kubenswrapper[4792]: E0929 19:38:32.161592 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerName="extract-utilities" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.161619 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerName="extract-utilities" Sep 29 19:38:32 crc kubenswrapper[4792]: E0929 19:38:32.161641 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerName="registry-server" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.161654 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerName="registry-server" Sep 29 19:38:32 crc kubenswrapper[4792]: E0929 19:38:32.161684 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerName="extract-content" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.161695 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerName="extract-content" Sep 29 19:38:32 crc kubenswrapper[4792]: E0929 19:38:32.161741 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a5948bb-2b33-40f6-9a12-1b8b4e3071a7" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.161758 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a5948bb-2b33-40f6-9a12-1b8b4e3071a7" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.162115 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a5948bb-2b33-40f6-9a12-1b8b4e3071a7" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.162150 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3a91ff8-561c-48c6-ad49-205ea9f5a4d6" containerName="registry-server" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.162988 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.168310 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.169704 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.169872 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.170145 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.170394 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.170564 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.170682 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.179866 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf"] Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.232941 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.233022 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.233225 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.233381 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.233410 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.233636 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnclq\" (UniqueName: \"kubernetes.io/projected/477ec7c1-0c72-4b69-9a72-05d465fe26b9-kube-api-access-lnclq\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.233672 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.233703 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.233723 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.335924 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.336268 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.336290 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.336360 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnclq\" (UniqueName: 
\"kubernetes.io/projected/477ec7c1-0c72-4b69-9a72-05d465fe26b9-kube-api-access-lnclq\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.336387 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.336405 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.336424 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.336456 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.336485 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.338282 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.340488 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.341038 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-0\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.342023 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.343025 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.343724 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.343806 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.346137 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.354099 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnclq\" (UniqueName: \"kubernetes.io/projected/477ec7c1-0c72-4b69-9a72-05d465fe26b9-kube-api-access-lnclq\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ghrcf\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:32 crc kubenswrapper[4792]: I0929 19:38:32.483454 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:38:33 crc kubenswrapper[4792]: I0929 19:38:33.030970 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf"] Sep 29 19:38:33 crc kubenswrapper[4792]: I0929 19:38:33.974151 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" event={"ID":"477ec7c1-0c72-4b69-9a72-05d465fe26b9","Type":"ContainerStarted","Data":"76934edb72670e2450bd684af27bea8e2c6d84a395ba9dd6db152c2eca004377"} Sep 29 19:38:33 crc kubenswrapper[4792]: I0929 19:38:33.974544 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" event={"ID":"477ec7c1-0c72-4b69-9a72-05d465fe26b9","Type":"ContainerStarted","Data":"85eca040192b7bbd878313d7a1087b7538e39032102625d27871f3ca15e97415"} Sep 29 19:38:33 crc kubenswrapper[4792]: I0929 19:38:33.997925 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" podStartSLOduration=1.829095438 podStartE2EDuration="1.997901474s" podCreationTimestamp="2025-09-29 19:38:32 +0000 UTC" firstStartedPulling="2025-09-29 19:38:33.036168879 +0000 UTC m=+2525.029476275" lastFinishedPulling="2025-09-29 19:38:33.204974915 +0000 UTC m=+2525.198282311" observedRunningTime="2025-09-29 19:38:33.991705391 +0000 UTC m=+2525.985012787" watchObservedRunningTime="2025-09-29 19:38:33.997901474 +0000 UTC m=+2525.991208870" Sep 29 19:38:42 crc kubenswrapper[4792]: I0929 19:38:42.015726 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:38:42 crc kubenswrapper[4792]: E0929 19:38:42.016432 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:38:57 crc kubenswrapper[4792]: I0929 19:38:57.015179 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:38:57 crc kubenswrapper[4792]: E0929 19:38:57.016909 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:39:11 crc kubenswrapper[4792]: I0929 19:39:11.017958 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:39:11 crc kubenswrapper[4792]: E0929 19:39:11.018779 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" 
podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:39:23 crc kubenswrapper[4792]: I0929 19:39:23.015740 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:39:23 crc kubenswrapper[4792]: E0929 19:39:23.016485 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:39:36 crc kubenswrapper[4792]: I0929 19:39:36.016184 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:39:36 crc kubenswrapper[4792]: E0929 19:39:36.016882 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:39:47 crc kubenswrapper[4792]: I0929 19:39:47.016354 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:39:47 crc kubenswrapper[4792]: E0929 19:39:47.017215 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:39:58 crc kubenswrapper[4792]: I0929 19:39:58.015416 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:39:58 crc kubenswrapper[4792]: E0929 19:39:58.016183 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:40:13 crc kubenswrapper[4792]: I0929 19:40:13.016153 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:40:13 crc kubenswrapper[4792]: E0929 19:40:13.017350 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:40:26 crc kubenswrapper[4792]: I0929 19:40:26.015789 4792 scope.go:117] "RemoveContainer" 
containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:40:26 crc kubenswrapper[4792]: E0929 19:40:26.016620 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.136475 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ll8x8"] Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.139390 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.150592 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ll8x8"] Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.175467 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k4m6\" (UniqueName: \"kubernetes.io/projected/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-kube-api-access-7k4m6\") pod \"community-operators-ll8x8\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.175589 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-utilities\") pod \"community-operators-ll8x8\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.175653 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-catalog-content\") pod \"community-operators-ll8x8\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.276903 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-utilities\") pod \"community-operators-ll8x8\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.277000 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-catalog-content\") pod \"community-operators-ll8x8\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.277031 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k4m6\" (UniqueName: \"kubernetes.io/projected/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-kube-api-access-7k4m6\") pod \"community-operators-ll8x8\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " pod="openshift-marketplace/community-operators-ll8x8" Sep 29 
19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.277781 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-catalog-content\") pod \"community-operators-ll8x8\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.277982 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-utilities\") pod \"community-operators-ll8x8\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.299472 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k4m6\" (UniqueName: \"kubernetes.io/projected/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-kube-api-access-7k4m6\") pod \"community-operators-ll8x8\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.461483 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:35 crc kubenswrapper[4792]: I0929 19:40:35.991455 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ll8x8"] Sep 29 19:40:36 crc kubenswrapper[4792]: I0929 19:40:36.133311 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ll8x8" event={"ID":"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3","Type":"ContainerStarted","Data":"1b20588cf83d72f01398052ad407639e3968810add02c48be14f20c61850f562"} Sep 29 19:40:37 crc kubenswrapper[4792]: I0929 19:40:37.015377 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:40:37 crc kubenswrapper[4792]: E0929 19:40:37.015983 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:40:37 crc kubenswrapper[4792]: I0929 19:40:37.142089 4792 generic.go:334] "Generic (PLEG): container finished" podID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerID="9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93" exitCode=0 Sep 29 19:40:37 crc kubenswrapper[4792]: I0929 19:40:37.142179 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ll8x8" event={"ID":"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3","Type":"ContainerDied","Data":"9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93"} Sep 29 19:40:39 crc kubenswrapper[4792]: I0929 19:40:39.161683 4792 generic.go:334] "Generic (PLEG): container finished" podID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerID="037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad" exitCode=0 Sep 29 19:40:39 crc kubenswrapper[4792]: I0929 19:40:39.161837 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ll8x8" 
event={"ID":"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3","Type":"ContainerDied","Data":"037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad"} Sep 29 19:40:40 crc kubenswrapper[4792]: I0929 19:40:40.174039 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ll8x8" event={"ID":"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3","Type":"ContainerStarted","Data":"77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e"} Sep 29 19:40:40 crc kubenswrapper[4792]: I0929 19:40:40.193369 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ll8x8" podStartSLOduration=2.698421809 podStartE2EDuration="5.193348131s" podCreationTimestamp="2025-09-29 19:40:35 +0000 UTC" firstStartedPulling="2025-09-29 19:40:37.144270168 +0000 UTC m=+2649.137577564" lastFinishedPulling="2025-09-29 19:40:39.63919649 +0000 UTC m=+2651.632503886" observedRunningTime="2025-09-29 19:40:40.191934025 +0000 UTC m=+2652.185241461" watchObservedRunningTime="2025-09-29 19:40:40.193348131 +0000 UTC m=+2652.186655537" Sep 29 19:40:45 crc kubenswrapper[4792]: I0929 19:40:45.463079 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:45 crc kubenswrapper[4792]: I0929 19:40:45.463653 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:45 crc kubenswrapper[4792]: I0929 19:40:45.527774 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:46 crc kubenswrapper[4792]: I0929 19:40:46.265377 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:46 crc kubenswrapper[4792]: I0929 19:40:46.721654 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ll8x8"] Sep 29 19:40:48 crc kubenswrapper[4792]: I0929 19:40:48.016303 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:40:48 crc kubenswrapper[4792]: E0929 19:40:48.017148 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:40:48 crc kubenswrapper[4792]: I0929 19:40:48.235839 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ll8x8" podUID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerName="registry-server" containerID="cri-o://77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e" gracePeriod=2 Sep 29 19:40:48 crc kubenswrapper[4792]: I0929 19:40:48.667467 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:48 crc kubenswrapper[4792]: I0929 19:40:48.743650 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-catalog-content\") pod \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " Sep 29 19:40:48 crc kubenswrapper[4792]: I0929 19:40:48.743767 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7k4m6\" (UniqueName: \"kubernetes.io/projected/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-kube-api-access-7k4m6\") pod \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " Sep 29 19:40:48 crc kubenswrapper[4792]: I0929 19:40:48.743792 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-utilities\") pod \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\" (UID: \"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3\") " Sep 29 19:40:48 crc kubenswrapper[4792]: I0929 19:40:48.744791 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-utilities" (OuterVolumeSpecName: "utilities") pod "7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" (UID: "7900b4a9-9cfc-4799-9a26-2952a0e8bfa3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:40:48 crc kubenswrapper[4792]: I0929 19:40:48.749180 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-kube-api-access-7k4m6" (OuterVolumeSpecName: "kube-api-access-7k4m6") pod "7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" (UID: "7900b4a9-9cfc-4799-9a26-2952a0e8bfa3"). InnerVolumeSpecName "kube-api-access-7k4m6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:40:48 crc kubenswrapper[4792]: I0929 19:40:48.845803 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7k4m6\" (UniqueName: \"kubernetes.io/projected/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-kube-api-access-7k4m6\") on node \"crc\" DevicePath \"\"" Sep 29 19:40:48 crc kubenswrapper[4792]: I0929 19:40:48.845839 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.047203 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" (UID: "7900b4a9-9cfc-4799-9a26-2952a0e8bfa3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.050397 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.250319 4792 generic.go:334] "Generic (PLEG): container finished" podID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerID="77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e" exitCode=0 Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.250370 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ll8x8" event={"ID":"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3","Type":"ContainerDied","Data":"77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e"} Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.250400 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ll8x8" event={"ID":"7900b4a9-9cfc-4799-9a26-2952a0e8bfa3","Type":"ContainerDied","Data":"1b20588cf83d72f01398052ad407639e3968810add02c48be14f20c61850f562"} Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.250419 4792 scope.go:117] "RemoveContainer" containerID="77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.250575 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ll8x8" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.285049 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ll8x8"] Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.292601 4792 scope.go:117] "RemoveContainer" containerID="037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.295982 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ll8x8"] Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.320165 4792 scope.go:117] "RemoveContainer" containerID="9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.359832 4792 scope.go:117] "RemoveContainer" containerID="77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e" Sep 29 19:40:49 crc kubenswrapper[4792]: E0929 19:40:49.360329 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e\": container with ID starting with 77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e not found: ID does not exist" containerID="77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.360367 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e"} err="failed to get container status \"77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e\": rpc error: code = NotFound desc = could not find container \"77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e\": container with ID starting with 77392ad5a06733bc68467dd4e7cbcedc5e4c5a0148fd355c53e6cb047930f87e not found: ID does not exist" Sep 29 
19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.360394 4792 scope.go:117] "RemoveContainer" containerID="037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad" Sep 29 19:40:49 crc kubenswrapper[4792]: E0929 19:40:49.360786 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad\": container with ID starting with 037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad not found: ID does not exist" containerID="037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.360811 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad"} err="failed to get container status \"037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad\": rpc error: code = NotFound desc = could not find container \"037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad\": container with ID starting with 037c4503123cf7c5feaf52aa2d6337b546352ddfb160a41e302ea325061460ad not found: ID does not exist" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.360828 4792 scope.go:117] "RemoveContainer" containerID="9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93" Sep 29 19:40:49 crc kubenswrapper[4792]: E0929 19:40:49.361182 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93\": container with ID starting with 9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93 not found: ID does not exist" containerID="9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93" Sep 29 19:40:49 crc kubenswrapper[4792]: I0929 19:40:49.361249 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93"} err="failed to get container status \"9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93\": rpc error: code = NotFound desc = could not find container \"9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93\": container with ID starting with 9d8aa1edbc735387be3fa1637ef7d972d3b90cc6b0f28c150b92101d7710bc93 not found: ID does not exist" Sep 29 19:40:51 crc kubenswrapper[4792]: I0929 19:40:51.027943 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" path="/var/lib/kubelet/pods/7900b4a9-9cfc-4799-9a26-2952a0e8bfa3/volumes" Sep 29 19:40:59 crc kubenswrapper[4792]: I0929 19:40:59.021201 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:40:59 crc kubenswrapper[4792]: E0929 19:40:59.022033 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:41:13 crc kubenswrapper[4792]: I0929 19:41:13.016166 4792 scope.go:117] "RemoveContainer" 
containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:41:13 crc kubenswrapper[4792]: E0929 19:41:13.017053 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:41:24 crc kubenswrapper[4792]: I0929 19:41:24.015395 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:41:24 crc kubenswrapper[4792]: E0929 19:41:24.016190 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:41:36 crc kubenswrapper[4792]: I0929 19:41:36.015481 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:41:36 crc kubenswrapper[4792]: E0929 19:41:36.016512 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:41:51 crc kubenswrapper[4792]: I0929 19:41:51.016498 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:41:51 crc kubenswrapper[4792]: I0929 19:41:51.835514 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"c6365e5183d9e12a4ecc8cc816993eb646b458526ff16370ed5f7570dceeeb3e"} Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.676844 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rhz27"] Sep 29 19:42:23 crc kubenswrapper[4792]: E0929 19:42:23.678621 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerName="extract-content" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.678706 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerName="extract-content" Sep 29 19:42:23 crc kubenswrapper[4792]: E0929 19:42:23.678786 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerName="extract-utilities" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.678861 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerName="extract-utilities" Sep 29 19:42:23 crc kubenswrapper[4792]: E0929 19:42:23.678938 4792 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerName="registry-server" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.679007 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerName="registry-server" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.679274 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="7900b4a9-9cfc-4799-9a26-2952a0e8bfa3" containerName="registry-server" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.680924 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.697123 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhz27"] Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.770956 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlp7g\" (UniqueName: \"kubernetes.io/projected/898df5dd-23a4-4777-84b1-6723ca9ad059-kube-api-access-vlp7g\") pod \"redhat-marketplace-rhz27\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.771028 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-utilities\") pod \"redhat-marketplace-rhz27\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.771369 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-catalog-content\") pod \"redhat-marketplace-rhz27\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.872621 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-catalog-content\") pod \"redhat-marketplace-rhz27\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.872750 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlp7g\" (UniqueName: \"kubernetes.io/projected/898df5dd-23a4-4777-84b1-6723ca9ad059-kube-api-access-vlp7g\") pod \"redhat-marketplace-rhz27\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.872791 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-utilities\") pod \"redhat-marketplace-rhz27\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.873136 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-catalog-content\") pod \"redhat-marketplace-rhz27\" (UID: 
\"898df5dd-23a4-4777-84b1-6723ca9ad059\") " pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.873187 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-utilities\") pod \"redhat-marketplace-rhz27\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:23 crc kubenswrapper[4792]: I0929 19:42:23.895542 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlp7g\" (UniqueName: \"kubernetes.io/projected/898df5dd-23a4-4777-84b1-6723ca9ad059-kube-api-access-vlp7g\") pod \"redhat-marketplace-rhz27\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.001250 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.267473 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wqlv2"] Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.272506 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.279772 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wqlv2"] Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.382354 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74xzx\" (UniqueName: \"kubernetes.io/projected/2a513d22-2c55-411d-9784-a8d6667509d4-kube-api-access-74xzx\") pod \"certified-operators-wqlv2\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.382399 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-catalog-content\") pod \"certified-operators-wqlv2\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.382492 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-utilities\") pod \"certified-operators-wqlv2\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.482491 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhz27"] Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.489056 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74xzx\" (UniqueName: \"kubernetes.io/projected/2a513d22-2c55-411d-9784-a8d6667509d4-kube-api-access-74xzx\") pod \"certified-operators-wqlv2\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.489108 4792 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-catalog-content\") pod \"certified-operators-wqlv2\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.489225 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-utilities\") pod \"certified-operators-wqlv2\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.489754 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-utilities\") pod \"certified-operators-wqlv2\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.489989 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-catalog-content\") pod \"certified-operators-wqlv2\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.510319 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74xzx\" (UniqueName: \"kubernetes.io/projected/2a513d22-2c55-411d-9784-a8d6667509d4-kube-api-access-74xzx\") pod \"certified-operators-wqlv2\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:24 crc kubenswrapper[4792]: I0929 19:42:24.604301 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:25 crc kubenswrapper[4792]: I0929 19:42:25.113449 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wqlv2"] Sep 29 19:42:25 crc kubenswrapper[4792]: I0929 19:42:25.159042 4792 generic.go:334] "Generic (PLEG): container finished" podID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerID="59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af" exitCode=0 Sep 29 19:42:25 crc kubenswrapper[4792]: I0929 19:42:25.159134 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhz27" event={"ID":"898df5dd-23a4-4777-84b1-6723ca9ad059","Type":"ContainerDied","Data":"59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af"} Sep 29 19:42:25 crc kubenswrapper[4792]: I0929 19:42:25.159175 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhz27" event={"ID":"898df5dd-23a4-4777-84b1-6723ca9ad059","Type":"ContainerStarted","Data":"0fad1f979858cf98745192d36d0a1c305ba466e4cc753d7a211302470b64e409"} Sep 29 19:42:25 crc kubenswrapper[4792]: I0929 19:42:25.167283 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:42:25 crc kubenswrapper[4792]: I0929 19:42:25.167997 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqlv2" event={"ID":"2a513d22-2c55-411d-9784-a8d6667509d4","Type":"ContainerStarted","Data":"d1fcb8bae9e420a6f56aea92848d182892fbb3b419818167915b986c7bc485b9"} Sep 29 19:42:26 crc kubenswrapper[4792]: I0929 19:42:26.178126 4792 generic.go:334] "Generic (PLEG): container finished" podID="2a513d22-2c55-411d-9784-a8d6667509d4" containerID="57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa" exitCode=0 Sep 29 19:42:26 crc kubenswrapper[4792]: I0929 19:42:26.178276 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqlv2" event={"ID":"2a513d22-2c55-411d-9784-a8d6667509d4","Type":"ContainerDied","Data":"57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa"} Sep 29 19:42:26 crc kubenswrapper[4792]: I0929 19:42:26.184470 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhz27" event={"ID":"898df5dd-23a4-4777-84b1-6723ca9ad059","Type":"ContainerStarted","Data":"79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b"} Sep 29 19:42:27 crc kubenswrapper[4792]: I0929 19:42:27.207110 4792 generic.go:334] "Generic (PLEG): container finished" podID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerID="79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b" exitCode=0 Sep 29 19:42:27 crc kubenswrapper[4792]: I0929 19:42:27.207749 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhz27" event={"ID":"898df5dd-23a4-4777-84b1-6723ca9ad059","Type":"ContainerDied","Data":"79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b"} Sep 29 19:42:28 crc kubenswrapper[4792]: I0929 19:42:28.217152 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhz27" event={"ID":"898df5dd-23a4-4777-84b1-6723ca9ad059","Type":"ContainerStarted","Data":"738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1"} Sep 29 19:42:28 crc kubenswrapper[4792]: I0929 19:42:28.219208 4792 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/certified-operators-wqlv2" event={"ID":"2a513d22-2c55-411d-9784-a8d6667509d4","Type":"ContainerStarted","Data":"8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43"} Sep 29 19:42:28 crc kubenswrapper[4792]: I0929 19:42:28.247448 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rhz27" podStartSLOduration=2.751926742 podStartE2EDuration="5.247426909s" podCreationTimestamp="2025-09-29 19:42:23 +0000 UTC" firstStartedPulling="2025-09-29 19:42:25.167036567 +0000 UTC m=+2757.160343963" lastFinishedPulling="2025-09-29 19:42:27.662536714 +0000 UTC m=+2759.655844130" observedRunningTime="2025-09-29 19:42:28.240706596 +0000 UTC m=+2760.234014002" watchObservedRunningTime="2025-09-29 19:42:28.247426909 +0000 UTC m=+2760.240734305" Sep 29 19:42:29 crc kubenswrapper[4792]: I0929 19:42:29.244916 4792 generic.go:334] "Generic (PLEG): container finished" podID="2a513d22-2c55-411d-9784-a8d6667509d4" containerID="8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43" exitCode=0 Sep 29 19:42:29 crc kubenswrapper[4792]: I0929 19:42:29.245119 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqlv2" event={"ID":"2a513d22-2c55-411d-9784-a8d6667509d4","Type":"ContainerDied","Data":"8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43"} Sep 29 19:42:30 crc kubenswrapper[4792]: I0929 19:42:30.255023 4792 generic.go:334] "Generic (PLEG): container finished" podID="477ec7c1-0c72-4b69-9a72-05d465fe26b9" containerID="76934edb72670e2450bd684af27bea8e2c6d84a395ba9dd6db152c2eca004377" exitCode=0 Sep 29 19:42:30 crc kubenswrapper[4792]: I0929 19:42:30.255086 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" event={"ID":"477ec7c1-0c72-4b69-9a72-05d465fe26b9","Type":"ContainerDied","Data":"76934edb72670e2450bd684af27bea8e2c6d84a395ba9dd6db152c2eca004377"} Sep 29 19:42:30 crc kubenswrapper[4792]: I0929 19:42:30.257944 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqlv2" event={"ID":"2a513d22-2c55-411d-9784-a8d6667509d4","Type":"ContainerStarted","Data":"1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc"} Sep 29 19:42:30 crc kubenswrapper[4792]: I0929 19:42:30.308185 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wqlv2" podStartSLOduration=2.823976632 podStartE2EDuration="6.308159556s" podCreationTimestamp="2025-09-29 19:42:24 +0000 UTC" firstStartedPulling="2025-09-29 19:42:26.180038572 +0000 UTC m=+2758.173345968" lastFinishedPulling="2025-09-29 19:42:29.664221496 +0000 UTC m=+2761.657528892" observedRunningTime="2025-09-29 19:42:30.301152565 +0000 UTC m=+2762.294459961" watchObservedRunningTime="2025-09-29 19:42:30.308159556 +0000 UTC m=+2762.301466972" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.715148 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.842264 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-inventory\") pod \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.842338 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-1\") pod \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.842376 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-0\") pod \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.842400 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-combined-ca-bundle\") pod \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.842475 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnclq\" (UniqueName: \"kubernetes.io/projected/477ec7c1-0c72-4b69-9a72-05d465fe26b9-kube-api-access-lnclq\") pod \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.842534 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-ssh-key\") pod \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.842588 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-1\") pod \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.842636 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-extra-config-0\") pod \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.842728 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-0\") pod \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\" (UID: \"477ec7c1-0c72-4b69-9a72-05d465fe26b9\") " Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.869069 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "477ec7c1-0c72-4b69-9a72-05d465fe26b9" (UID: "477ec7c1-0c72-4b69-9a72-05d465fe26b9"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.869400 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477ec7c1-0c72-4b69-9a72-05d465fe26b9-kube-api-access-lnclq" (OuterVolumeSpecName: "kube-api-access-lnclq") pod "477ec7c1-0c72-4b69-9a72-05d465fe26b9" (UID: "477ec7c1-0c72-4b69-9a72-05d465fe26b9"). InnerVolumeSpecName "kube-api-access-lnclq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.874627 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "477ec7c1-0c72-4b69-9a72-05d465fe26b9" (UID: "477ec7c1-0c72-4b69-9a72-05d465fe26b9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.878861 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "477ec7c1-0c72-4b69-9a72-05d465fe26b9" (UID: "477ec7c1-0c72-4b69-9a72-05d465fe26b9"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.895110 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "477ec7c1-0c72-4b69-9a72-05d465fe26b9" (UID: "477ec7c1-0c72-4b69-9a72-05d465fe26b9"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.900372 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "477ec7c1-0c72-4b69-9a72-05d465fe26b9" (UID: "477ec7c1-0c72-4b69-9a72-05d465fe26b9"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.901933 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "477ec7c1-0c72-4b69-9a72-05d465fe26b9" (UID: "477ec7c1-0c72-4b69-9a72-05d465fe26b9"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.906609 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-inventory" (OuterVolumeSpecName: "inventory") pod "477ec7c1-0c72-4b69-9a72-05d465fe26b9" (UID: "477ec7c1-0c72-4b69-9a72-05d465fe26b9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.914045 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "477ec7c1-0c72-4b69-9a72-05d465fe26b9" (UID: "477ec7c1-0c72-4b69-9a72-05d465fe26b9"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.953248 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.953296 4792 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.953308 4792 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.953317 4792 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.953329 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.953338 4792 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.953348 4792 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.953358 4792 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477ec7c1-0c72-4b69-9a72-05d465fe26b9-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:31 crc kubenswrapper[4792]: I0929 19:42:31.953367 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnclq\" (UniqueName: \"kubernetes.io/projected/477ec7c1-0c72-4b69-9a72-05d465fe26b9-kube-api-access-lnclq\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.276988 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" event={"ID":"477ec7c1-0c72-4b69-9a72-05d465fe26b9","Type":"ContainerDied","Data":"85eca040192b7bbd878313d7a1087b7538e39032102625d27871f3ca15e97415"} Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.277051 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ghrcf" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.277083 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85eca040192b7bbd878313d7a1087b7538e39032102625d27871f3ca15e97415" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.443723 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz"] Sep 29 19:42:32 crc kubenswrapper[4792]: E0929 19:42:32.444116 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477ec7c1-0c72-4b69-9a72-05d465fe26b9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.444133 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="477ec7c1-0c72-4b69-9a72-05d465fe26b9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.444335 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="477ec7c1-0c72-4b69-9a72-05d465fe26b9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.444910 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.447323 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.447342 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.448790 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-jvdxd" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.448954 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.449065 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.476680 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz"] Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.564713 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfcmq\" (UniqueName: \"kubernetes.io/projected/62dafb72-d440-48ec-af0e-46ee7e16ab5a-kube-api-access-dfcmq\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.564897 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.564964 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.564993 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.565031 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.565067 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.565122 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.667019 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.667113 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.667227 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 
crc kubenswrapper[4792]: I0929 19:42:32.667298 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.667365 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.667441 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfcmq\" (UniqueName: \"kubernetes.io/projected/62dafb72-d440-48ec-af0e-46ee7e16ab5a-kube-api-access-dfcmq\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.667606 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.673447 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.674168 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.674244 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.675011 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.675286 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.678174 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.696565 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfcmq\" (UniqueName: \"kubernetes.io/projected/62dafb72-d440-48ec-af0e-46ee7e16ab5a-kube-api-access-dfcmq\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:32 crc kubenswrapper[4792]: I0929 19:42:32.766578 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:42:33 crc kubenswrapper[4792]: I0929 19:42:33.316983 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz"] Sep 29 19:42:33 crc kubenswrapper[4792]: W0929 19:42:33.322433 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62dafb72_d440_48ec_af0e_46ee7e16ab5a.slice/crio-b19607cd004dca99036321ec16617de2f8c2977c6b19c492a448e460af7d9ca9 WatchSource:0}: Error finding container b19607cd004dca99036321ec16617de2f8c2977c6b19c492a448e460af7d9ca9: Status 404 returned error can't find the container with id b19607cd004dca99036321ec16617de2f8c2977c6b19c492a448e460af7d9ca9 Sep 29 19:42:34 crc kubenswrapper[4792]: I0929 19:42:34.001482 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:34 crc kubenswrapper[4792]: I0929 19:42:34.002690 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:34 crc kubenswrapper[4792]: I0929 19:42:34.059643 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:34 crc kubenswrapper[4792]: I0929 19:42:34.298115 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" event={"ID":"62dafb72-d440-48ec-af0e-46ee7e16ab5a","Type":"ContainerStarted","Data":"7f2ce4e210ebaceeb10658392d48ccdf27f257f476fb9a97129bdadff4df8608"} Sep 29 19:42:34 crc kubenswrapper[4792]: I0929 19:42:34.298402 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" 
event={"ID":"62dafb72-d440-48ec-af0e-46ee7e16ab5a","Type":"ContainerStarted","Data":"b19607cd004dca99036321ec16617de2f8c2977c6b19c492a448e460af7d9ca9"} Sep 29 19:42:34 crc kubenswrapper[4792]: I0929 19:42:34.323682 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" podStartSLOduration=2.014433983 podStartE2EDuration="2.323662984s" podCreationTimestamp="2025-09-29 19:42:32 +0000 UTC" firstStartedPulling="2025-09-29 19:42:33.325249055 +0000 UTC m=+2765.318556451" lastFinishedPulling="2025-09-29 19:42:33.634478056 +0000 UTC m=+2765.627785452" observedRunningTime="2025-09-29 19:42:34.31809999 +0000 UTC m=+2766.311407386" watchObservedRunningTime="2025-09-29 19:42:34.323662984 +0000 UTC m=+2766.316970380" Sep 29 19:42:34 crc kubenswrapper[4792]: I0929 19:42:34.343795 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:34 crc kubenswrapper[4792]: I0929 19:42:34.605418 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:34 crc kubenswrapper[4792]: I0929 19:42:34.605477 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:34 crc kubenswrapper[4792]: I0929 19:42:34.656006 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:35 crc kubenswrapper[4792]: I0929 19:42:35.367257 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:35 crc kubenswrapper[4792]: I0929 19:42:35.652174 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhz27"] Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.315793 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rhz27" podUID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerName="registry-server" containerID="cri-o://738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1" gracePeriod=2 Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.751036 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.849377 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlp7g\" (UniqueName: \"kubernetes.io/projected/898df5dd-23a4-4777-84b1-6723ca9ad059-kube-api-access-vlp7g\") pod \"898df5dd-23a4-4777-84b1-6723ca9ad059\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.849486 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-catalog-content\") pod \"898df5dd-23a4-4777-84b1-6723ca9ad059\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.849598 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-utilities\") pod \"898df5dd-23a4-4777-84b1-6723ca9ad059\" (UID: \"898df5dd-23a4-4777-84b1-6723ca9ad059\") " Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.850728 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-utilities" (OuterVolumeSpecName: "utilities") pod "898df5dd-23a4-4777-84b1-6723ca9ad059" (UID: "898df5dd-23a4-4777-84b1-6723ca9ad059"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.859083 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/898df5dd-23a4-4777-84b1-6723ca9ad059-kube-api-access-vlp7g" (OuterVolumeSpecName: "kube-api-access-vlp7g") pod "898df5dd-23a4-4777-84b1-6723ca9ad059" (UID: "898df5dd-23a4-4777-84b1-6723ca9ad059"). InnerVolumeSpecName "kube-api-access-vlp7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.863003 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "898df5dd-23a4-4777-84b1-6723ca9ad059" (UID: "898df5dd-23a4-4777-84b1-6723ca9ad059"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.953190 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.953471 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlp7g\" (UniqueName: \"kubernetes.io/projected/898df5dd-23a4-4777-84b1-6723ca9ad059-kube-api-access-vlp7g\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:36 crc kubenswrapper[4792]: I0929 19:42:36.953481 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/898df5dd-23a4-4777-84b1-6723ca9ad059-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.047637 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wqlv2"] Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.327285 4792 generic.go:334] "Generic (PLEG): container finished" podID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerID="738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1" exitCode=0 Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.327490 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wqlv2" podUID="2a513d22-2c55-411d-9784-a8d6667509d4" containerName="registry-server" containerID="cri-o://1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc" gracePeriod=2 Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.327926 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhz27" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.328628 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhz27" event={"ID":"898df5dd-23a4-4777-84b1-6723ca9ad059","Type":"ContainerDied","Data":"738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1"} Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.328659 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhz27" event={"ID":"898df5dd-23a4-4777-84b1-6723ca9ad059","Type":"ContainerDied","Data":"0fad1f979858cf98745192d36d0a1c305ba466e4cc753d7a211302470b64e409"} Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.328676 4792 scope.go:117] "RemoveContainer" containerID="738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.358233 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhz27"] Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.371015 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhz27"] Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.373271 4792 scope.go:117] "RemoveContainer" containerID="79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.395183 4792 scope.go:117] "RemoveContainer" containerID="59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.436511 4792 scope.go:117] "RemoveContainer" containerID="738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1" Sep 29 19:42:37 crc kubenswrapper[4792]: E0929 19:42:37.436988 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1\": container with ID starting with 738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1 not found: ID does not exist" containerID="738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.437042 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1"} err="failed to get container status \"738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1\": rpc error: code = NotFound desc = could not find container \"738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1\": container with ID starting with 738d7019059ec89d9d00efb997b33609496ed06eadd19184c3d0030e9f5ffdf1 not found: ID does not exist" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.437076 4792 scope.go:117] "RemoveContainer" containerID="79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b" Sep 29 19:42:37 crc kubenswrapper[4792]: E0929 19:42:37.437430 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b\": container with ID starting with 79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b not found: ID does not exist" containerID="79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.437471 4792 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b"} err="failed to get container status \"79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b\": rpc error: code = NotFound desc = could not find container \"79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b\": container with ID starting with 79b01c37e2a2ed59f743c5cdf6a6e1cb877c6d907c0b88f53de1a7d91e018a0b not found: ID does not exist" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.437501 4792 scope.go:117] "RemoveContainer" containerID="59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af" Sep 29 19:42:37 crc kubenswrapper[4792]: E0929 19:42:37.437752 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af\": container with ID starting with 59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af not found: ID does not exist" containerID="59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.437779 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af"} err="failed to get container status \"59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af\": rpc error: code = NotFound desc = could not find container \"59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af\": container with ID starting with 59222802b0cc5b9349ea3c7ab7dea2900ca6f4195ea405f6df2197ca583835af not found: ID does not exist" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.807862 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.877134 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-utilities\") pod \"2a513d22-2c55-411d-9784-a8d6667509d4\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.877319 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74xzx\" (UniqueName: \"kubernetes.io/projected/2a513d22-2c55-411d-9784-a8d6667509d4-kube-api-access-74xzx\") pod \"2a513d22-2c55-411d-9784-a8d6667509d4\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.877378 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-catalog-content\") pod \"2a513d22-2c55-411d-9784-a8d6667509d4\" (UID: \"2a513d22-2c55-411d-9784-a8d6667509d4\") " Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.878139 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-utilities" (OuterVolumeSpecName: "utilities") pod "2a513d22-2c55-411d-9784-a8d6667509d4" (UID: "2a513d22-2c55-411d-9784-a8d6667509d4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.882879 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a513d22-2c55-411d-9784-a8d6667509d4-kube-api-access-74xzx" (OuterVolumeSpecName: "kube-api-access-74xzx") pod "2a513d22-2c55-411d-9784-a8d6667509d4" (UID: "2a513d22-2c55-411d-9784-a8d6667509d4"). InnerVolumeSpecName "kube-api-access-74xzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.921809 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a513d22-2c55-411d-9784-a8d6667509d4" (UID: "2a513d22-2c55-411d-9784-a8d6667509d4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.980080 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.980114 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74xzx\" (UniqueName: \"kubernetes.io/projected/2a513d22-2c55-411d-9784-a8d6667509d4-kube-api-access-74xzx\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:37 crc kubenswrapper[4792]: I0929 19:42:37.980123 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a513d22-2c55-411d-9784-a8d6667509d4-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.340548 4792 generic.go:334] "Generic (PLEG): container finished" podID="2a513d22-2c55-411d-9784-a8d6667509d4" containerID="1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc" exitCode=0 Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.340607 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqlv2" event={"ID":"2a513d22-2c55-411d-9784-a8d6667509d4","Type":"ContainerDied","Data":"1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc"} Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.340656 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqlv2" event={"ID":"2a513d22-2c55-411d-9784-a8d6667509d4","Type":"ContainerDied","Data":"d1fcb8bae9e420a6f56aea92848d182892fbb3b419818167915b986c7bc485b9"} Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.340696 4792 scope.go:117] "RemoveContainer" containerID="1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc" Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.341461 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wqlv2" Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.364016 4792 scope.go:117] "RemoveContainer" containerID="8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43" Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.395274 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wqlv2"] Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.404935 4792 scope.go:117] "RemoveContainer" containerID="57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa" Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.406996 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wqlv2"] Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.442602 4792 scope.go:117] "RemoveContainer" containerID="1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc" Sep 29 19:42:38 crc kubenswrapper[4792]: E0929 19:42:38.443062 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc\": container with ID starting with 1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc not found: ID does not exist" containerID="1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc" Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.443090 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc"} err="failed to get container status \"1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc\": rpc error: code = NotFound desc = could not find container \"1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc\": container with ID starting with 1cb139545a6781eb13bc957112bbcfd58ff71f782f67e093f32ca011a86920cc not found: ID does not exist" Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.443107 4792 scope.go:117] "RemoveContainer" containerID="8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43" Sep 29 19:42:38 crc kubenswrapper[4792]: E0929 19:42:38.443397 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43\": container with ID starting with 8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43 not found: ID does not exist" containerID="8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43" Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.443412 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43"} err="failed to get container status \"8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43\": rpc error: code = NotFound desc = could not find container \"8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43\": container with ID starting with 8f695f6d0cdd20745976deaa356033ceb292dd99bac05ed0c8b398e29537df43 not found: ID does not exist" Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.443426 4792 scope.go:117] "RemoveContainer" containerID="57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa" Sep 29 19:42:38 crc kubenswrapper[4792]: E0929 19:42:38.443678 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa\": container with ID starting with 57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa not found: ID does not exist" containerID="57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa" Sep 29 19:42:38 crc kubenswrapper[4792]: I0929 19:42:38.443699 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa"} err="failed to get container status \"57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa\": rpc error: code = NotFound desc = could not find container \"57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa\": container with ID starting with 57fe0152c5b32dd493610fd419c8f5b37016322f120890fc9a4c20d92f736caa not found: ID does not exist" Sep 29 19:42:39 crc kubenswrapper[4792]: I0929 19:42:39.027513 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a513d22-2c55-411d-9784-a8d6667509d4" path="/var/lib/kubelet/pods/2a513d22-2c55-411d-9784-a8d6667509d4/volumes" Sep 29 19:42:39 crc kubenswrapper[4792]: I0929 19:42:39.028197 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="898df5dd-23a4-4777-84b1-6723ca9ad059" path="/var/lib/kubelet/pods/898df5dd-23a4-4777-84b1-6723ca9ad059/volumes" Sep 29 19:44:11 crc kubenswrapper[4792]: I0929 19:44:11.959877 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:44:11 crc kubenswrapper[4792]: I0929 19:44:11.960516 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:44:41 crc kubenswrapper[4792]: I0929 19:44:41.959914 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:44:41 crc kubenswrapper[4792]: I0929 19:44:41.962148 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.187488 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw"] Sep 29 19:45:00 crc kubenswrapper[4792]: E0929 19:45:00.188621 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerName="registry-server" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.188639 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerName="registry-server" Sep 29 19:45:00 crc 
kubenswrapper[4792]: E0929 19:45:00.188651 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a513d22-2c55-411d-9784-a8d6667509d4" containerName="extract-utilities" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.188660 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a513d22-2c55-411d-9784-a8d6667509d4" containerName="extract-utilities" Sep 29 19:45:00 crc kubenswrapper[4792]: E0929 19:45:00.188671 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerName="extract-content" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.188682 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerName="extract-content" Sep 29 19:45:00 crc kubenswrapper[4792]: E0929 19:45:00.188713 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a513d22-2c55-411d-9784-a8d6667509d4" containerName="registry-server" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.188721 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a513d22-2c55-411d-9784-a8d6667509d4" containerName="registry-server" Sep 29 19:45:00 crc kubenswrapper[4792]: E0929 19:45:00.188752 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a513d22-2c55-411d-9784-a8d6667509d4" containerName="extract-content" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.188761 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a513d22-2c55-411d-9784-a8d6667509d4" containerName="extract-content" Sep 29 19:45:00 crc kubenswrapper[4792]: E0929 19:45:00.188774 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerName="extract-utilities" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.188783 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerName="extract-utilities" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.189056 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a513d22-2c55-411d-9784-a8d6667509d4" containerName="registry-server" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.189094 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="898df5dd-23a4-4777-84b1-6723ca9ad059" containerName="registry-server" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.189797 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.198394 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.198603 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.217799 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw"] Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.220651 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-config-volume\") pod \"collect-profiles-29319585-hzsxw\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.220737 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-secret-volume\") pod \"collect-profiles-29319585-hzsxw\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.220830 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j5l6\" (UniqueName: \"kubernetes.io/projected/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-kube-api-access-8j5l6\") pod \"collect-profiles-29319585-hzsxw\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.322534 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j5l6\" (UniqueName: \"kubernetes.io/projected/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-kube-api-access-8j5l6\") pod \"collect-profiles-29319585-hzsxw\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.322659 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-config-volume\") pod \"collect-profiles-29319585-hzsxw\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.322704 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-secret-volume\") pod \"collect-profiles-29319585-hzsxw\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.324432 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-config-volume\") pod 
\"collect-profiles-29319585-hzsxw\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.331674 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-secret-volume\") pod \"collect-profiles-29319585-hzsxw\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.345963 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j5l6\" (UniqueName: \"kubernetes.io/projected/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-kube-api-access-8j5l6\") pod \"collect-profiles-29319585-hzsxw\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.512469 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:00 crc kubenswrapper[4792]: I0929 19:45:00.967785 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw"] Sep 29 19:45:01 crc kubenswrapper[4792]: I0929 19:45:01.701405 4792 generic.go:334] "Generic (PLEG): container finished" podID="92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4" containerID="eb1cd926f0d95859c3d0112fb86f76b642c84f90c3ff2508eeb79bd6a5b1ca79" exitCode=0 Sep 29 19:45:01 crc kubenswrapper[4792]: I0929 19:45:01.701501 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" event={"ID":"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4","Type":"ContainerDied","Data":"eb1cd926f0d95859c3d0112fb86f76b642c84f90c3ff2508eeb79bd6a5b1ca79"} Sep 29 19:45:01 crc kubenswrapper[4792]: I0929 19:45:01.701807 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" event={"ID":"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4","Type":"ContainerStarted","Data":"cba218fb8b5a6df02311ac4fb2db6df1169e54dff268cd63afd79498c2b4d1f7"} Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.033322 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.080619 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-secret-volume\") pod \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.080725 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j5l6\" (UniqueName: \"kubernetes.io/projected/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-kube-api-access-8j5l6\") pod \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.080754 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-config-volume\") pod \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\" (UID: \"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4\") " Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.082390 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-config-volume" (OuterVolumeSpecName: "config-volume") pod "92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4" (UID: "92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.088696 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-kube-api-access-8j5l6" (OuterVolumeSpecName: "kube-api-access-8j5l6") pod "92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4" (UID: "92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4"). InnerVolumeSpecName "kube-api-access-8j5l6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.095001 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4" (UID: "92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.183170 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j5l6\" (UniqueName: \"kubernetes.io/projected/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-kube-api-access-8j5l6\") on node \"crc\" DevicePath \"\"" Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.183208 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.183218 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.720532 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" event={"ID":"92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4","Type":"ContainerDied","Data":"cba218fb8b5a6df02311ac4fb2db6df1169e54dff268cd63afd79498c2b4d1f7"} Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.720582 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-hzsxw" Sep 29 19:45:03 crc kubenswrapper[4792]: I0929 19:45:03.720593 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cba218fb8b5a6df02311ac4fb2db6df1169e54dff268cd63afd79498c2b4d1f7" Sep 29 19:45:04 crc kubenswrapper[4792]: I0929 19:45:04.133178 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng"] Sep 29 19:45:04 crc kubenswrapper[4792]: I0929 19:45:04.140003 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319540-7hrng"] Sep 29 19:45:05 crc kubenswrapper[4792]: I0929 19:45:05.027639 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a627c392-8375-4d2b-be05-3b7b6b697664" path="/var/lib/kubelet/pods/a627c392-8375-4d2b-be05-3b7b6b697664/volumes" Sep 29 19:45:11 crc kubenswrapper[4792]: I0929 19:45:11.959599 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:45:11 crc kubenswrapper[4792]: I0929 19:45:11.960208 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:45:11 crc kubenswrapper[4792]: I0929 19:45:11.960249 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:45:11 crc kubenswrapper[4792]: I0929 19:45:11.960909 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c6365e5183d9e12a4ecc8cc816993eb646b458526ff16370ed5f7570dceeeb3e"} 
pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:45:11 crc kubenswrapper[4792]: I0929 19:45:11.960955 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://c6365e5183d9e12a4ecc8cc816993eb646b458526ff16370ed5f7570dceeeb3e" gracePeriod=600 Sep 29 19:45:12 crc kubenswrapper[4792]: I0929 19:45:12.803444 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="c6365e5183d9e12a4ecc8cc816993eb646b458526ff16370ed5f7570dceeeb3e" exitCode=0 Sep 29 19:45:12 crc kubenswrapper[4792]: I0929 19:45:12.803525 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"c6365e5183d9e12a4ecc8cc816993eb646b458526ff16370ed5f7570dceeeb3e"} Sep 29 19:45:12 crc kubenswrapper[4792]: I0929 19:45:12.804233 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36"} Sep 29 19:45:12 crc kubenswrapper[4792]: I0929 19:45:12.804268 4792 scope.go:117] "RemoveContainer" containerID="a82ac18e65bc3c0fc963379d6b48e28ffc3451e0b1a7fdcf4bc177930d10ace7" Sep 29 19:45:38 crc kubenswrapper[4792]: I0929 19:45:38.210739 4792 scope.go:117] "RemoveContainer" containerID="439ec775c6478dba6c71e3ec1b2e23b47207eb0f683df24624f9027115c2fd81" Sep 29 19:46:18 crc kubenswrapper[4792]: I0929 19:46:18.410290 4792 generic.go:334] "Generic (PLEG): container finished" podID="62dafb72-d440-48ec-af0e-46ee7e16ab5a" containerID="7f2ce4e210ebaceeb10658392d48ccdf27f257f476fb9a97129bdadff4df8608" exitCode=0 Sep 29 19:46:18 crc kubenswrapper[4792]: I0929 19:46:18.410787 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" event={"ID":"62dafb72-d440-48ec-af0e-46ee7e16ab5a","Type":"ContainerDied","Data":"7f2ce4e210ebaceeb10658392d48ccdf27f257f476fb9a97129bdadff4df8608"} Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.823091 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.938345 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-1\") pod \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.938792 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-telemetry-combined-ca-bundle\") pod \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.938910 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-inventory\") pod \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.938993 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-2\") pod \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.939028 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ssh-key\") pod \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.939349 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-0\") pod \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.940041 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfcmq\" (UniqueName: \"kubernetes.io/projected/62dafb72-d440-48ec-af0e-46ee7e16ab5a-kube-api-access-dfcmq\") pod \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\" (UID: \"62dafb72-d440-48ec-af0e-46ee7e16ab5a\") " Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.945542 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "62dafb72-d440-48ec-af0e-46ee7e16ab5a" (UID: "62dafb72-d440-48ec-af0e-46ee7e16ab5a"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.953003 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62dafb72-d440-48ec-af0e-46ee7e16ab5a-kube-api-access-dfcmq" (OuterVolumeSpecName: "kube-api-access-dfcmq") pod "62dafb72-d440-48ec-af0e-46ee7e16ab5a" (UID: "62dafb72-d440-48ec-af0e-46ee7e16ab5a"). 
InnerVolumeSpecName "kube-api-access-dfcmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.966191 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "62dafb72-d440-48ec-af0e-46ee7e16ab5a" (UID: "62dafb72-d440-48ec-af0e-46ee7e16ab5a"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.969662 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "62dafb72-d440-48ec-af0e-46ee7e16ab5a" (UID: "62dafb72-d440-48ec-af0e-46ee7e16ab5a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.970516 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "62dafb72-d440-48ec-af0e-46ee7e16ab5a" (UID: "62dafb72-d440-48ec-af0e-46ee7e16ab5a"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.972246 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "62dafb72-d440-48ec-af0e-46ee7e16ab5a" (UID: "62dafb72-d440-48ec-af0e-46ee7e16ab5a"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:46:19 crc kubenswrapper[4792]: I0929 19:46:19.991301 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-inventory" (OuterVolumeSpecName: "inventory") pod "62dafb72-d440-48ec-af0e-46ee7e16ab5a" (UID: "62dafb72-d440-48ec-af0e-46ee7e16ab5a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:46:20 crc kubenswrapper[4792]: I0929 19:46:20.043112 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfcmq\" (UniqueName: \"kubernetes.io/projected/62dafb72-d440-48ec-af0e-46ee7e16ab5a-kube-api-access-dfcmq\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:20 crc kubenswrapper[4792]: I0929 19:46:20.043195 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:20 crc kubenswrapper[4792]: I0929 19:46:20.043206 4792 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:20 crc kubenswrapper[4792]: I0929 19:46:20.043221 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:20 crc kubenswrapper[4792]: I0929 19:46:20.043233 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:20 crc kubenswrapper[4792]: I0929 19:46:20.043242 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:20 crc kubenswrapper[4792]: I0929 19:46:20.043251 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/62dafb72-d440-48ec-af0e-46ee7e16ab5a-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:20 crc kubenswrapper[4792]: I0929 19:46:20.435487 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" event={"ID":"62dafb72-d440-48ec-af0e-46ee7e16ab5a","Type":"ContainerDied","Data":"b19607cd004dca99036321ec16617de2f8c2977c6b19c492a448e460af7d9ca9"} Sep 29 19:46:20 crc kubenswrapper[4792]: I0929 19:46:20.435543 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz" Sep 29 19:46:20 crc kubenswrapper[4792]: I0929 19:46:20.435555 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b19607cd004dca99036321ec16617de2f8c2977c6b19c492a448e460af7d9ca9" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.083513 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 19:47:23 crc kubenswrapper[4792]: E0929 19:47:23.085033 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4" containerName="collect-profiles" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.085061 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4" containerName="collect-profiles" Sep 29 19:47:23 crc kubenswrapper[4792]: E0929 19:47:23.085082 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62dafb72-d440-48ec-af0e-46ee7e16ab5a" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.085096 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="62dafb72-d440-48ec-af0e-46ee7e16ab5a" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.085507 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="62dafb72-d440-48ec-af0e-46ee7e16ab5a" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.085549 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="92b4bdfd-1f6a-4003-bf1b-d3a5f99de7c4" containerName="collect-profiles" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.086679 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.092045 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.092217 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-rwtst" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.093021 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.094108 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.112021 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.205005 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.205065 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.205164 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs88s\" (UniqueName: \"kubernetes.io/projected/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-kube-api-access-hs88s\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.205208 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-config-data\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.205230 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.205269 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.205306 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.205319 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.205336 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.307272 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs88s\" (UniqueName: \"kubernetes.io/projected/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-kube-api-access-hs88s\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.307350 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-config-data\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.307373 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.307445 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.307500 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.307521 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.307540 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: 
\"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.307573 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.307597 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.308041 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.310739 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.312139 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.312453 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-config-data\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.312492 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.317053 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.317842 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.320277 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.333755 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs88s\" (UniqueName: \"kubernetes.io/projected/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-kube-api-access-hs88s\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.335307 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.443695 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 19:47:23 crc kubenswrapper[4792]: I0929 19:47:23.897768 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 19:47:24 crc kubenswrapper[4792]: I0929 19:47:24.037492 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b","Type":"ContainerStarted","Data":"0b8f68f37978451ee1299fec315a7600236357a1383924e78f052c6678cea3a5"} Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.247299 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-57s59"] Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.262557 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.291785 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-catalog-content\") pod \"redhat-operators-57s59\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.291944 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6kdm\" (UniqueName: \"kubernetes.io/projected/fad8707d-9a6a-473d-b351-5ea3962e43dc-kube-api-access-m6kdm\") pod \"redhat-operators-57s59\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.292128 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-utilities\") pod \"redhat-operators-57s59\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.292631 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-57s59"] Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.393943 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-utilities\") pod \"redhat-operators-57s59\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.394109 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-catalog-content\") pod \"redhat-operators-57s59\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.394170 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6kdm\" (UniqueName: \"kubernetes.io/projected/fad8707d-9a6a-473d-b351-5ea3962e43dc-kube-api-access-m6kdm\") pod \"redhat-operators-57s59\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.395067 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-utilities\") pod \"redhat-operators-57s59\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.395330 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-catalog-content\") pod \"redhat-operators-57s59\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.430936 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-m6kdm\" (UniqueName: \"kubernetes.io/projected/fad8707d-9a6a-473d-b351-5ea3962e43dc-kube-api-access-m6kdm\") pod \"redhat-operators-57s59\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:38 crc kubenswrapper[4792]: I0929 19:47:38.599799 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:47:41 crc kubenswrapper[4792]: I0929 19:47:41.959438 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:47:41 crc kubenswrapper[4792]: I0929 19:47:41.959775 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:47:58 crc kubenswrapper[4792]: E0929 19:47:58.895111 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Sep 29 19:47:58 crc kubenswrapper[4792]: E0929 19:47:58.919916 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hs88s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:
nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(49e8a61d-e4e3-4510-b209-7d6fb5b02e2b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:47:58 crc kubenswrapper[4792]: E0929 19:47:58.921958 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" Sep 29 19:47:59 crc kubenswrapper[4792]: I0929 19:47:59.355408 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-57s59"] Sep 29 19:47:59 crc kubenswrapper[4792]: I0929 19:47:59.378480 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57s59" event={"ID":"fad8707d-9a6a-473d-b351-5ea3962e43dc","Type":"ContainerStarted","Data":"df49a18a4d300160f99f8eb0d98995b80dd7c911feeda7986e415cbcec868210"} Sep 29 19:47:59 crc kubenswrapper[4792]: E0929 19:47:59.381112 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" Sep 29 19:48:00 crc kubenswrapper[4792]: I0929 19:48:00.406373 4792 generic.go:334] "Generic (PLEG): container finished" podID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerID="e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40" exitCode=0 Sep 29 19:48:00 crc kubenswrapper[4792]: I0929 19:48:00.406448 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57s59" event={"ID":"fad8707d-9a6a-473d-b351-5ea3962e43dc","Type":"ContainerDied","Data":"e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40"} Sep 29 19:48:00 crc kubenswrapper[4792]: I0929 19:48:00.414763 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:48:02 crc kubenswrapper[4792]: I0929 19:48:02.433188 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57s59" 
event={"ID":"fad8707d-9a6a-473d-b351-5ea3962e43dc","Type":"ContainerStarted","Data":"55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0"} Sep 29 19:48:07 crc kubenswrapper[4792]: I0929 19:48:07.492749 4792 generic.go:334] "Generic (PLEG): container finished" podID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerID="55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0" exitCode=0 Sep 29 19:48:07 crc kubenswrapper[4792]: I0929 19:48:07.492820 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57s59" event={"ID":"fad8707d-9a6a-473d-b351-5ea3962e43dc","Type":"ContainerDied","Data":"55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0"} Sep 29 19:48:08 crc kubenswrapper[4792]: I0929 19:48:08.503256 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57s59" event={"ID":"fad8707d-9a6a-473d-b351-5ea3962e43dc","Type":"ContainerStarted","Data":"00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f"} Sep 29 19:48:08 crc kubenswrapper[4792]: I0929 19:48:08.525361 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-57s59" podStartSLOduration=22.74131874 podStartE2EDuration="30.525340648s" podCreationTimestamp="2025-09-29 19:47:38 +0000 UTC" firstStartedPulling="2025-09-29 19:48:00.413805745 +0000 UTC m=+3092.407113171" lastFinishedPulling="2025-09-29 19:48:08.197827663 +0000 UTC m=+3100.191135079" observedRunningTime="2025-09-29 19:48:08.523102309 +0000 UTC m=+3100.516409715" watchObservedRunningTime="2025-09-29 19:48:08.525340648 +0000 UTC m=+3100.518648044" Sep 29 19:48:08 crc kubenswrapper[4792]: I0929 19:48:08.600722 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:48:08 crc kubenswrapper[4792]: I0929 19:48:08.601059 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:48:09 crc kubenswrapper[4792]: I0929 19:48:09.647716 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-57s59" podUID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerName="registry-server" probeResult="failure" output=< Sep 29 19:48:09 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Sep 29 19:48:09 crc kubenswrapper[4792]: > Sep 29 19:48:11 crc kubenswrapper[4792]: I0929 19:48:11.960496 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:48:11 crc kubenswrapper[4792]: I0929 19:48:11.960926 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:48:12 crc kubenswrapper[4792]: I0929 19:48:12.598351 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 29 19:48:14 crc kubenswrapper[4792]: I0929 19:48:14.554667 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" 
event={"ID":"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b","Type":"ContainerStarted","Data":"b070da4a6377ec4d174ed71129e854c4d41dc4b91810f01aa80c6cc25a3a689f"} Sep 29 19:48:14 crc kubenswrapper[4792]: I0929 19:48:14.574456 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.887463431 podStartE2EDuration="52.574438833s" podCreationTimestamp="2025-09-29 19:47:22 +0000 UTC" firstStartedPulling="2025-09-29 19:47:23.909009091 +0000 UTC m=+3055.902316497" lastFinishedPulling="2025-09-29 19:48:12.595984503 +0000 UTC m=+3104.589291899" observedRunningTime="2025-09-29 19:48:14.57433007 +0000 UTC m=+3106.567637506" watchObservedRunningTime="2025-09-29 19:48:14.574438833 +0000 UTC m=+3106.567746229" Sep 29 19:48:18 crc kubenswrapper[4792]: I0929 19:48:18.643439 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:48:18 crc kubenswrapper[4792]: I0929 19:48:18.694623 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:48:18 crc kubenswrapper[4792]: I0929 19:48:18.884037 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-57s59"] Sep 29 19:48:20 crc kubenswrapper[4792]: I0929 19:48:20.604918 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-57s59" podUID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerName="registry-server" containerID="cri-o://00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f" gracePeriod=2 Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.045018 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.151020 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-utilities\") pod \"fad8707d-9a6a-473d-b351-5ea3962e43dc\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.151182 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6kdm\" (UniqueName: \"kubernetes.io/projected/fad8707d-9a6a-473d-b351-5ea3962e43dc-kube-api-access-m6kdm\") pod \"fad8707d-9a6a-473d-b351-5ea3962e43dc\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.151335 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-catalog-content\") pod \"fad8707d-9a6a-473d-b351-5ea3962e43dc\" (UID: \"fad8707d-9a6a-473d-b351-5ea3962e43dc\") " Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.151702 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-utilities" (OuterVolumeSpecName: "utilities") pod "fad8707d-9a6a-473d-b351-5ea3962e43dc" (UID: "fad8707d-9a6a-473d-b351-5ea3962e43dc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.152073 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.171055 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fad8707d-9a6a-473d-b351-5ea3962e43dc-kube-api-access-m6kdm" (OuterVolumeSpecName: "kube-api-access-m6kdm") pod "fad8707d-9a6a-473d-b351-5ea3962e43dc" (UID: "fad8707d-9a6a-473d-b351-5ea3962e43dc"). InnerVolumeSpecName "kube-api-access-m6kdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.246468 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fad8707d-9a6a-473d-b351-5ea3962e43dc" (UID: "fad8707d-9a6a-473d-b351-5ea3962e43dc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.253597 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fad8707d-9a6a-473d-b351-5ea3962e43dc-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.253632 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6kdm\" (UniqueName: \"kubernetes.io/projected/fad8707d-9a6a-473d-b351-5ea3962e43dc-kube-api-access-m6kdm\") on node \"crc\" DevicePath \"\"" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.619518 4792 generic.go:334] "Generic (PLEG): container finished" podID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerID="00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f" exitCode=0 Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.619577 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-57s59" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.619595 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57s59" event={"ID":"fad8707d-9a6a-473d-b351-5ea3962e43dc","Type":"ContainerDied","Data":"00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f"} Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.620568 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57s59" event={"ID":"fad8707d-9a6a-473d-b351-5ea3962e43dc","Type":"ContainerDied","Data":"df49a18a4d300160f99f8eb0d98995b80dd7c911feeda7986e415cbcec868210"} Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.620587 4792 scope.go:117] "RemoveContainer" containerID="00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.655572 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-57s59"] Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.663051 4792 scope.go:117] "RemoveContainer" containerID="55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.667054 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-57s59"] Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.698241 4792 scope.go:117] "RemoveContainer" containerID="e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.726336 4792 scope.go:117] "RemoveContainer" containerID="00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f" Sep 29 19:48:21 crc kubenswrapper[4792]: E0929 19:48:21.726959 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f\": container with ID starting with 00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f not found: ID does not exist" containerID="00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.727002 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f"} err="failed to get container status \"00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f\": rpc error: code = NotFound desc = could not find container \"00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f\": container with ID starting with 00bde8ca6515aab48a211b67066433daa30bfe51e93195fc19d49c10e4626c2f not found: ID does not exist" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.727031 4792 scope.go:117] "RemoveContainer" containerID="55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0" Sep 29 19:48:21 crc kubenswrapper[4792]: E0929 19:48:21.727288 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0\": container with ID starting with 55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0 not found: ID does not exist" containerID="55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.727318 4792 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0"} err="failed to get container status \"55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0\": rpc error: code = NotFound desc = could not find container \"55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0\": container with ID starting with 55162cb1edd11c7b3079a19db99c50f204f27fd698b81acc3c58b3f087a3b3e0 not found: ID does not exist" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.727339 4792 scope.go:117] "RemoveContainer" containerID="e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40" Sep 29 19:48:21 crc kubenswrapper[4792]: E0929 19:48:21.727545 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40\": container with ID starting with e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40 not found: ID does not exist" containerID="e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40" Sep 29 19:48:21 crc kubenswrapper[4792]: I0929 19:48:21.727570 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40"} err="failed to get container status \"e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40\": rpc error: code = NotFound desc = could not find container \"e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40\": container with ID starting with e716009ed89343ee6ed91d8f140c61776fe6308e31c780947d4c5bacda8f7c40 not found: ID does not exist" Sep 29 19:48:23 crc kubenswrapper[4792]: I0929 19:48:23.026098 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fad8707d-9a6a-473d-b351-5ea3962e43dc" path="/var/lib/kubelet/pods/fad8707d-9a6a-473d-b351-5ea3962e43dc/volumes" Sep 29 19:48:41 crc kubenswrapper[4792]: I0929 19:48:41.960132 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:48:41 crc kubenswrapper[4792]: I0929 19:48:41.960642 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:48:41 crc kubenswrapper[4792]: I0929 19:48:41.960681 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:48:41 crc kubenswrapper[4792]: I0929 19:48:41.961255 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:48:41 crc kubenswrapper[4792]: I0929 19:48:41.961492 4792 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" gracePeriod=600 Sep 29 19:48:42 crc kubenswrapper[4792]: E0929 19:48:42.084030 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:48:42 crc kubenswrapper[4792]: I0929 19:48:42.817664 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" exitCode=0 Sep 29 19:48:42 crc kubenswrapper[4792]: I0929 19:48:42.817749 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36"} Sep 29 19:48:42 crc kubenswrapper[4792]: I0929 19:48:42.817971 4792 scope.go:117] "RemoveContainer" containerID="c6365e5183d9e12a4ecc8cc816993eb646b458526ff16370ed5f7570dceeeb3e" Sep 29 19:48:42 crc kubenswrapper[4792]: I0929 19:48:42.818528 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:48:42 crc kubenswrapper[4792]: E0929 19:48:42.818947 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:48:56 crc kubenswrapper[4792]: I0929 19:48:56.016740 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:48:56 crc kubenswrapper[4792]: E0929 19:48:56.017793 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:49:11 crc kubenswrapper[4792]: I0929 19:49:11.015983 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:49:11 crc kubenswrapper[4792]: E0929 19:49:11.016820 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:49:22 crc 
kubenswrapper[4792]: I0929 19:49:22.016123 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:49:22 crc kubenswrapper[4792]: E0929 19:49:22.016787 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:49:33 crc kubenswrapper[4792]: I0929 19:49:33.015464 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:49:33 crc kubenswrapper[4792]: E0929 19:49:33.016260 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:49:46 crc kubenswrapper[4792]: I0929 19:49:46.015957 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:49:46 crc kubenswrapper[4792]: E0929 19:49:46.017616 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:49:59 crc kubenswrapper[4792]: I0929 19:49:59.025414 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:49:59 crc kubenswrapper[4792]: E0929 19:49:59.026128 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:50:10 crc kubenswrapper[4792]: I0929 19:50:10.014882 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:50:10 crc kubenswrapper[4792]: E0929 19:50:10.015711 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:50:22 crc kubenswrapper[4792]: I0929 19:50:22.014986 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:50:22 crc 
kubenswrapper[4792]: E0929 19:50:22.015799 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:50:35 crc kubenswrapper[4792]: I0929 19:50:35.015958 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:50:35 crc kubenswrapper[4792]: E0929 19:50:35.016816 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.016351 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:50:46 crc kubenswrapper[4792]: E0929 19:50:46.020543 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.254791 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wggxm"] Sep 29 19:50:46 crc kubenswrapper[4792]: E0929 19:50:46.255393 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerName="registry-server" Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.255465 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerName="registry-server" Sep 29 19:50:46 crc kubenswrapper[4792]: E0929 19:50:46.255550 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerName="extract-utilities" Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.255611 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerName="extract-utilities" Sep 29 19:50:46 crc kubenswrapper[4792]: E0929 19:50:46.255677 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerName="extract-content" Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.255738 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerName="extract-content" Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.256011 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="fad8707d-9a6a-473d-b351-5ea3962e43dc" containerName="registry-server" Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.257388 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.359169 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wggxm"]
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.406360 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-catalog-content\") pod \"community-operators-wggxm\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") " pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.406650 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2wwm\" (UniqueName: \"kubernetes.io/projected/15ee4b1c-b307-4494-877b-342b342770e2-kube-api-access-r2wwm\") pod \"community-operators-wggxm\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") " pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.407038 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-utilities\") pod \"community-operators-wggxm\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") " pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.509363 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-utilities\") pod \"community-operators-wggxm\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") " pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.509447 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-catalog-content\") pod \"community-operators-wggxm\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") " pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.509512 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2wwm\" (UniqueName: \"kubernetes.io/projected/15ee4b1c-b307-4494-877b-342b342770e2-kube-api-access-r2wwm\") pod \"community-operators-wggxm\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") " pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.510073 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-utilities\") pod \"community-operators-wggxm\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") " pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.510150 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-catalog-content\") pod \"community-operators-wggxm\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") " pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.530343 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2wwm\" (UniqueName: \"kubernetes.io/projected/15ee4b1c-b307-4494-877b-342b342770e2-kube-api-access-r2wwm\") pod \"community-operators-wggxm\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") " pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:46 crc kubenswrapper[4792]: I0929 19:50:46.575909 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:50:47 crc kubenswrapper[4792]: I0929 19:50:47.661464 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wggxm"]
Sep 29 19:50:47 crc kubenswrapper[4792]: I0929 19:50:47.942712 4792 generic.go:334] "Generic (PLEG): container finished" podID="15ee4b1c-b307-4494-877b-342b342770e2" containerID="5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff" exitCode=0
Sep 29 19:50:47 crc kubenswrapper[4792]: I0929 19:50:47.942825 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wggxm" event={"ID":"15ee4b1c-b307-4494-877b-342b342770e2","Type":"ContainerDied","Data":"5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff"}
Sep 29 19:50:47 crc kubenswrapper[4792]: I0929 19:50:47.943029 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wggxm" event={"ID":"15ee4b1c-b307-4494-877b-342b342770e2","Type":"ContainerStarted","Data":"075855b452ec275f648ed5dd8715e7824894b4a2c8bde06f111e16dd001ca3a7"}
Sep 29 19:50:48 crc kubenswrapper[4792]: I0929 19:50:48.955848 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wggxm" event={"ID":"15ee4b1c-b307-4494-877b-342b342770e2","Type":"ContainerStarted","Data":"b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2"}
Sep 29 19:50:51 crc kubenswrapper[4792]: I0929 19:50:51.981466 4792 generic.go:334] "Generic (PLEG): container finished" podID="15ee4b1c-b307-4494-877b-342b342770e2" containerID="b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2" exitCode=0
Sep 29 19:50:51 crc kubenswrapper[4792]: I0929 19:50:51.981581 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wggxm" event={"ID":"15ee4b1c-b307-4494-877b-342b342770e2","Type":"ContainerDied","Data":"b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2"}
Sep 29 19:50:52 crc kubenswrapper[4792]: I0929 19:50:52.992957 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wggxm" event={"ID":"15ee4b1c-b307-4494-877b-342b342770e2","Type":"ContainerStarted","Data":"51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0"}
Sep 29 19:50:53 crc kubenswrapper[4792]: I0929 19:50:53.024814 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wggxm" podStartSLOduration=2.570023017 podStartE2EDuration="7.024793098s" podCreationTimestamp="2025-09-29 19:50:46 +0000 UTC" firstStartedPulling="2025-09-29 19:50:47.944486679 +0000 UTC m=+3259.937794075" lastFinishedPulling="2025-09-29 19:50:52.39925676 +0000 UTC m=+3264.392564156" observedRunningTime="2025-09-29 19:50:53.012982079 +0000 UTC m=+3265.006289495" watchObservedRunningTime="2025-09-29 19:50:53.024793098 +0000 UTC m=+3265.018100494"
status="unhealthy" pod="openshift-marketplace/community-operators-wggxm" Sep 29 19:50:56 crc kubenswrapper[4792]: I0929 19:50:56.577901 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wggxm" Sep 29 19:50:57 crc kubenswrapper[4792]: I0929 19:50:57.629234 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-wggxm" podUID="15ee4b1c-b307-4494-877b-342b342770e2" containerName="registry-server" probeResult="failure" output=< Sep 29 19:50:57 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Sep 29 19:50:57 crc kubenswrapper[4792]: > Sep 29 19:51:00 crc kubenswrapper[4792]: I0929 19:51:00.014938 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:51:00 crc kubenswrapper[4792]: E0929 19:51:00.015496 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:51:06 crc kubenswrapper[4792]: I0929 19:51:06.636556 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wggxm" Sep 29 19:51:06 crc kubenswrapper[4792]: I0929 19:51:06.697713 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wggxm" Sep 29 19:51:06 crc kubenswrapper[4792]: I0929 19:51:06.882294 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wggxm"] Sep 29 19:51:08 crc kubenswrapper[4792]: I0929 19:51:08.129216 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wggxm" podUID="15ee4b1c-b307-4494-877b-342b342770e2" containerName="registry-server" containerID="cri-o://51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0" gracePeriod=2 Sep 29 19:51:08 crc kubenswrapper[4792]: I0929 19:51:08.752926 4792 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 19:51:08 crc kubenswrapper[4792]: I0929 19:51:08.752926 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:51:08 crc kubenswrapper[4792]: I0929 19:51:08.931226 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-catalog-content\") pod \"15ee4b1c-b307-4494-877b-342b342770e2\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") "
Sep 29 19:51:08 crc kubenswrapper[4792]: I0929 19:51:08.931365 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2wwm\" (UniqueName: \"kubernetes.io/projected/15ee4b1c-b307-4494-877b-342b342770e2-kube-api-access-r2wwm\") pod \"15ee4b1c-b307-4494-877b-342b342770e2\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") "
Sep 29 19:51:08 crc kubenswrapper[4792]: I0929 19:51:08.931409 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-utilities\") pod \"15ee4b1c-b307-4494-877b-342b342770e2\" (UID: \"15ee4b1c-b307-4494-877b-342b342770e2\") "
Sep 29 19:51:08 crc kubenswrapper[4792]: I0929 19:51:08.932398 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-utilities" (OuterVolumeSpecName: "utilities") pod "15ee4b1c-b307-4494-877b-342b342770e2" (UID: "15ee4b1c-b307-4494-877b-342b342770e2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:51:08 crc kubenswrapper[4792]: I0929 19:51:08.954007 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15ee4b1c-b307-4494-877b-342b342770e2-kube-api-access-r2wwm" (OuterVolumeSpecName: "kube-api-access-r2wwm") pod "15ee4b1c-b307-4494-877b-342b342770e2" (UID: "15ee4b1c-b307-4494-877b-342b342770e2"). InnerVolumeSpecName "kube-api-access-r2wwm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:51:08 crc kubenswrapper[4792]: I0929 19:51:08.992987 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15ee4b1c-b307-4494-877b-342b342770e2" (UID: "15ee4b1c-b307-4494-877b-342b342770e2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.033563 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.033752 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2wwm\" (UniqueName: \"kubernetes.io/projected/15ee4b1c-b307-4494-877b-342b342770e2-kube-api-access-r2wwm\") on node \"crc\" DevicePath \"\""
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.033767 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15ee4b1c-b307-4494-877b-342b342770e2-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.138345 4792 generic.go:334] "Generic (PLEG): container finished" podID="15ee4b1c-b307-4494-877b-342b342770e2" containerID="51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0" exitCode=0
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.138388 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wggxm" event={"ID":"15ee4b1c-b307-4494-877b-342b342770e2","Type":"ContainerDied","Data":"51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0"}
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.138422 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wggxm" event={"ID":"15ee4b1c-b307-4494-877b-342b342770e2","Type":"ContainerDied","Data":"075855b452ec275f648ed5dd8715e7824894b4a2c8bde06f111e16dd001ca3a7"}
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.138438 4792 scope.go:117] "RemoveContainer" containerID="51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0"
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.138551 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wggxm"
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.161308 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wggxm"]
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.168659 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wggxm"]
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.171404 4792 scope.go:117] "RemoveContainer" containerID="b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2"
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.192136 4792 scope.go:117] "RemoveContainer" containerID="5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff"
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.237331 4792 scope.go:117] "RemoveContainer" containerID="51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0"
Sep 29 19:51:09 crc kubenswrapper[4792]: E0929 19:51:09.237795 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0\": container with ID starting with 51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0 not found: ID does not exist" containerID="51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0"
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.237833 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0"} err="failed to get container status \"51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0\": rpc error: code = NotFound desc = could not find container \"51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0\": container with ID starting with 51599851b5e19e222fcad50d93386d4e81d2f00ab468a22ffe41a90290d85ec0 not found: ID does not exist"
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.237883 4792 scope.go:117] "RemoveContainer" containerID="b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2"
Sep 29 19:51:09 crc kubenswrapper[4792]: E0929 19:51:09.238160 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2\": container with ID starting with b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2 not found: ID does not exist" containerID="b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2"
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.238195 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2"} err="failed to get container status \"b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2\": rpc error: code = NotFound desc = could not find container \"b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2\": container with ID starting with b9a61376a9c10c1f1105fe870b4c2deb2a513733b619217c8ed25324f7d499b2 not found: ID does not exist"
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.238218 4792 scope.go:117] "RemoveContainer" containerID="5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff"
Sep 29 19:51:09 crc kubenswrapper[4792]: E0929 19:51:09.238410 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff\": container with ID starting with 5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff not found: ID does not exist" containerID="5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff"
Sep 29 19:51:09 crc kubenswrapper[4792]: I0929 19:51:09.238436 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff"} err="failed to get container status \"5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff\": rpc error: code = NotFound desc = could not find container \"5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff\": container with ID starting with 5d838c30cfa0596ce888bf751ad40dc0c6d31abebff2e3bba35f97eb82ce72ff not found: ID does not exist"
Sep 29 19:51:11 crc kubenswrapper[4792]: I0929 19:51:11.016005 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36"
Sep 29 19:51:11 crc kubenswrapper[4792]: E0929 19:51:11.016553 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 19:51:11 crc kubenswrapper[4792]: I0929 19:51:11.028891 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15ee4b1c-b307-4494-877b-342b342770e2" path="/var/lib/kubelet/pods/15ee4b1c-b307-4494-877b-342b342770e2/volumes"
Sep 29 19:51:22 crc kubenswrapper[4792]: I0929 19:51:22.015730 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36"
Sep 29 19:51:22 crc kubenswrapper[4792]: E0929 19:51:22.017488 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 19:51:33 crc kubenswrapper[4792]: I0929 19:51:33.015161 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36"
Sep 29 19:51:33 crc kubenswrapper[4792]: E0929 19:51:33.015868 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 19:51:47 crc kubenswrapper[4792]: I0929 19:51:47.015793 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36"
Sep 29 19:51:47 crc kubenswrapper[4792]: E0929 19:51:47.016616 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:52:00 crc kubenswrapper[4792]: I0929 19:52:00.015467 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:52:00 crc kubenswrapper[4792]: E0929 19:52:00.016061 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:52:14 crc kubenswrapper[4792]: I0929 19:52:14.016026 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:52:14 crc kubenswrapper[4792]: E0929 19:52:14.016697 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:52:25 crc kubenswrapper[4792]: I0929 19:52:25.015650 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:52:25 crc kubenswrapper[4792]: E0929 19:52:25.016375 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:52:37 crc kubenswrapper[4792]: I0929 19:52:37.015433 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:52:37 crc kubenswrapper[4792]: E0929 19:52:37.016028 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.223650 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vlv5c"] Sep 29 19:52:42 crc kubenswrapper[4792]: E0929 19:52:42.224699 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ee4b1c-b307-4494-877b-342b342770e2" containerName="extract-content" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.224823 4792 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="15ee4b1c-b307-4494-877b-342b342770e2" containerName="extract-content" Sep 29 19:52:42 crc kubenswrapper[4792]: E0929 19:52:42.224840 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ee4b1c-b307-4494-877b-342b342770e2" containerName="registry-server" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.224868 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ee4b1c-b307-4494-877b-342b342770e2" containerName="registry-server" Sep 29 19:52:42 crc kubenswrapper[4792]: E0929 19:52:42.224901 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ee4b1c-b307-4494-877b-342b342770e2" containerName="extract-utilities" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.224910 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ee4b1c-b307-4494-877b-342b342770e2" containerName="extract-utilities" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.225202 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="15ee4b1c-b307-4494-877b-342b342770e2" containerName="registry-server" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.226883 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.240598 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vlv5c"] Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.328968 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-catalog-content\") pod \"redhat-marketplace-vlv5c\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.329155 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-utilities\") pod \"redhat-marketplace-vlv5c\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.329228 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvxld\" (UniqueName: \"kubernetes.io/projected/eaac4008-c518-43e7-b49d-03b6adabd8f7-kube-api-access-rvxld\") pod \"redhat-marketplace-vlv5c\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.430807 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvxld\" (UniqueName: \"kubernetes.io/projected/eaac4008-c518-43e7-b49d-03b6adabd8f7-kube-api-access-rvxld\") pod \"redhat-marketplace-vlv5c\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.430926 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-catalog-content\") pod \"redhat-marketplace-vlv5c\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.431024 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-utilities\") pod \"redhat-marketplace-vlv5c\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.431419 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-utilities\") pod \"redhat-marketplace-vlv5c\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.431966 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-catalog-content\") pod \"redhat-marketplace-vlv5c\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.449591 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvxld\" (UniqueName: \"kubernetes.io/projected/eaac4008-c518-43e7-b49d-03b6adabd8f7-kube-api-access-rvxld\") pod \"redhat-marketplace-vlv5c\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:42 crc kubenswrapper[4792]: I0929 19:52:42.548906 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:43 crc kubenswrapper[4792]: I0929 19:52:43.109608 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vlv5c"] Sep 29 19:52:43 crc kubenswrapper[4792]: I0929 19:52:43.978166 4792 generic.go:334] "Generic (PLEG): container finished" podID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerID="ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f" exitCode=0 Sep 29 19:52:43 crc kubenswrapper[4792]: I0929 19:52:43.978576 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vlv5c" event={"ID":"eaac4008-c518-43e7-b49d-03b6adabd8f7","Type":"ContainerDied","Data":"ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f"} Sep 29 19:52:43 crc kubenswrapper[4792]: I0929 19:52:43.978624 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vlv5c" event={"ID":"eaac4008-c518-43e7-b49d-03b6adabd8f7","Type":"ContainerStarted","Data":"e5ec13d3112c5508a19e25a0b83b9cf0c2bf83c522a9be080a32a963b292fc76"} Sep 29 19:52:44 crc kubenswrapper[4792]: I0929 19:52:44.989256 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vlv5c" event={"ID":"eaac4008-c518-43e7-b49d-03b6adabd8f7","Type":"ContainerStarted","Data":"43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344"} Sep 29 19:52:46 crc kubenswrapper[4792]: I0929 19:52:45.999956 4792 generic.go:334] "Generic (PLEG): container finished" podID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerID="43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344" exitCode=0 Sep 29 19:52:46 crc kubenswrapper[4792]: I0929 19:52:46.000183 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vlv5c" 
event={"ID":"eaac4008-c518-43e7-b49d-03b6adabd8f7","Type":"ContainerDied","Data":"43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344"} Sep 29 19:52:47 crc kubenswrapper[4792]: I0929 19:52:47.011012 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vlv5c" event={"ID":"eaac4008-c518-43e7-b49d-03b6adabd8f7","Type":"ContainerStarted","Data":"515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3"} Sep 29 19:52:47 crc kubenswrapper[4792]: I0929 19:52:47.035664 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vlv5c" podStartSLOduration=2.604992775 podStartE2EDuration="5.035623013s" podCreationTimestamp="2025-09-29 19:52:42 +0000 UTC" firstStartedPulling="2025-09-29 19:52:43.981153593 +0000 UTC m=+3375.974460999" lastFinishedPulling="2025-09-29 19:52:46.411783841 +0000 UTC m=+3378.405091237" observedRunningTime="2025-09-29 19:52:47.034825892 +0000 UTC m=+3379.028133328" watchObservedRunningTime="2025-09-29 19:52:47.035623013 +0000 UTC m=+3379.028930409" Sep 29 19:52:50 crc kubenswrapper[4792]: I0929 19:52:50.015225 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:52:50 crc kubenswrapper[4792]: E0929 19:52:50.015529 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.100678 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cldvf"] Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.102715 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.113318 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cldvf"] Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.203989 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-catalog-content\") pod \"certified-operators-cldvf\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.204346 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvvdm\" (UniqueName: \"kubernetes.io/projected/f75f0a93-8867-45b8-8a7a-c4c45383fd98-kube-api-access-tvvdm\") pod \"certified-operators-cldvf\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.204444 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-utilities\") pod \"certified-operators-cldvf\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.305243 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-catalog-content\") pod \"certified-operators-cldvf\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.305362 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvvdm\" (UniqueName: \"kubernetes.io/projected/f75f0a93-8867-45b8-8a7a-c4c45383fd98-kube-api-access-tvvdm\") pod \"certified-operators-cldvf\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.305724 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-utilities\") pod \"certified-operators-cldvf\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.305757 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-catalog-content\") pod \"certified-operators-cldvf\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.306082 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-utilities\") pod \"certified-operators-cldvf\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.326813 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tvvdm\" (UniqueName: \"kubernetes.io/projected/f75f0a93-8867-45b8-8a7a-c4c45383fd98-kube-api-access-tvvdm\") pod \"certified-operators-cldvf\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.434012 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.549701 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.549746 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:52 crc kubenswrapper[4792]: I0929 19:52:52.660702 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:53 crc kubenswrapper[4792]: I0929 19:52:53.098427 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cldvf"] Sep 29 19:52:53 crc kubenswrapper[4792]: I0929 19:52:53.128563 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:54 crc kubenswrapper[4792]: I0929 19:52:54.070439 4792 generic.go:334] "Generic (PLEG): container finished" podID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerID="6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c" exitCode=0 Sep 29 19:52:54 crc kubenswrapper[4792]: I0929 19:52:54.070714 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cldvf" event={"ID":"f75f0a93-8867-45b8-8a7a-c4c45383fd98","Type":"ContainerDied","Data":"6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c"} Sep 29 19:52:54 crc kubenswrapper[4792]: I0929 19:52:54.070794 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cldvf" event={"ID":"f75f0a93-8867-45b8-8a7a-c4c45383fd98","Type":"ContainerStarted","Data":"d143d5b8e9c8d1041edcf1a0d8a527fa747a921819c80746767b8f9f48fbed27"} Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.082385 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vlv5c"] Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.084042 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cldvf" event={"ID":"f75f0a93-8867-45b8-8a7a-c4c45383fd98","Type":"ContainerStarted","Data":"de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10"} Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.084473 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vlv5c" podUID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerName="registry-server" containerID="cri-o://515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3" gracePeriod=2 Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.694062 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.780389 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvxld\" (UniqueName: \"kubernetes.io/projected/eaac4008-c518-43e7-b49d-03b6adabd8f7-kube-api-access-rvxld\") pod \"eaac4008-c518-43e7-b49d-03b6adabd8f7\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.780833 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-catalog-content\") pod \"eaac4008-c518-43e7-b49d-03b6adabd8f7\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.780910 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-utilities\") pod \"eaac4008-c518-43e7-b49d-03b6adabd8f7\" (UID: \"eaac4008-c518-43e7-b49d-03b6adabd8f7\") " Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.782108 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-utilities" (OuterVolumeSpecName: "utilities") pod "eaac4008-c518-43e7-b49d-03b6adabd8f7" (UID: "eaac4008-c518-43e7-b49d-03b6adabd8f7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.786333 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaac4008-c518-43e7-b49d-03b6adabd8f7-kube-api-access-rvxld" (OuterVolumeSpecName: "kube-api-access-rvxld") pod "eaac4008-c518-43e7-b49d-03b6adabd8f7" (UID: "eaac4008-c518-43e7-b49d-03b6adabd8f7"). InnerVolumeSpecName "kube-api-access-rvxld". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.789273 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eaac4008-c518-43e7-b49d-03b6adabd8f7" (UID: "eaac4008-c518-43e7-b49d-03b6adabd8f7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.883415 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.883458 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaac4008-c518-43e7-b49d-03b6adabd8f7-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:55 crc kubenswrapper[4792]: I0929 19:52:55.883471 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvxld\" (UniqueName: \"kubernetes.io/projected/eaac4008-c518-43e7-b49d-03b6adabd8f7-kube-api-access-rvxld\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.100099 4792 generic.go:334] "Generic (PLEG): container finished" podID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerID="515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3" exitCode=0 Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.100981 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vlv5c" event={"ID":"eaac4008-c518-43e7-b49d-03b6adabd8f7","Type":"ContainerDied","Data":"515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3"} Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.101023 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vlv5c" event={"ID":"eaac4008-c518-43e7-b49d-03b6adabd8f7","Type":"ContainerDied","Data":"e5ec13d3112c5508a19e25a0b83b9cf0c2bf83c522a9be080a32a963b292fc76"} Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.101040 4792 scope.go:117] "RemoveContainer" containerID="515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3" Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.101045 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vlv5c" Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.134680 4792 scope.go:117] "RemoveContainer" containerID="43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344" Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.151534 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vlv5c"] Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.162377 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vlv5c"] Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.163105 4792 scope.go:117] "RemoveContainer" containerID="ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f" Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.225297 4792 scope.go:117] "RemoveContainer" containerID="515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3" Sep 29 19:52:56 crc kubenswrapper[4792]: E0929 19:52:56.225692 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3\": container with ID starting with 515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3 not found: ID does not exist" containerID="515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3" Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.225730 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3"} err="failed to get container status \"515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3\": rpc error: code = NotFound desc = could not find container \"515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3\": container with ID starting with 515ece92825c9d277fa99113a0d7fc874fce43371edbcff63681b278d6568dd3 not found: ID does not exist" Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.225755 4792 scope.go:117] "RemoveContainer" containerID="43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344" Sep 29 19:52:56 crc kubenswrapper[4792]: E0929 19:52:56.226109 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344\": container with ID starting with 43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344 not found: ID does not exist" containerID="43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344" Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.226160 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344"} err="failed to get container status \"43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344\": rpc error: code = NotFound desc = could not find container \"43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344\": container with ID starting with 43f3da2a3616723f6f5aef300139a693c76091b9ade748f359e3c6bd2498d344 not found: ID does not exist" Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.226195 4792 scope.go:117] "RemoveContainer" containerID="ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f" Sep 29 19:52:56 crc kubenswrapper[4792]: E0929 19:52:56.226497 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f\": container with ID starting with ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f not found: ID does not exist" containerID="ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f" Sep 29 19:52:56 crc kubenswrapper[4792]: I0929 19:52:56.226523 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f"} err="failed to get container status \"ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f\": rpc error: code = NotFound desc = could not find container \"ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f\": container with ID starting with ff5ebadc7abfb0d81e3349400009dcc3d7d3d228a515f96e0c62d5843b82f68f not found: ID does not exist" Sep 29 19:52:57 crc kubenswrapper[4792]: I0929 19:52:57.077675 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaac4008-c518-43e7-b49d-03b6adabd8f7" path="/var/lib/kubelet/pods/eaac4008-c518-43e7-b49d-03b6adabd8f7/volumes" Sep 29 19:52:57 crc kubenswrapper[4792]: I0929 19:52:57.124222 4792 generic.go:334] "Generic (PLEG): container finished" podID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerID="de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10" exitCode=0 Sep 29 19:52:57 crc kubenswrapper[4792]: I0929 19:52:57.124300 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cldvf" event={"ID":"f75f0a93-8867-45b8-8a7a-c4c45383fd98","Type":"ContainerDied","Data":"de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10"} Sep 29 19:52:58 crc kubenswrapper[4792]: I0929 19:52:58.136377 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cldvf" event={"ID":"f75f0a93-8867-45b8-8a7a-c4c45383fd98","Type":"ContainerStarted","Data":"47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a"} Sep 29 19:52:58 crc kubenswrapper[4792]: I0929 19:52:58.164955 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cldvf" podStartSLOduration=2.742257484 podStartE2EDuration="6.164937295s" podCreationTimestamp="2025-09-29 19:52:52 +0000 UTC" firstStartedPulling="2025-09-29 19:52:54.072660273 +0000 UTC m=+3386.065967659" lastFinishedPulling="2025-09-29 19:52:57.495340074 +0000 UTC m=+3389.488647470" observedRunningTime="2025-09-29 19:52:58.160032977 +0000 UTC m=+3390.153340373" watchObservedRunningTime="2025-09-29 19:52:58.164937295 +0000 UTC m=+3390.158244691" Sep 29 19:53:02 crc kubenswrapper[4792]: I0929 19:53:02.434767 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:53:02 crc kubenswrapper[4792]: I0929 19:53:02.435368 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:53:02 crc kubenswrapper[4792]: I0929 19:53:02.490297 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:53:03 crc kubenswrapper[4792]: I0929 19:53:03.016047 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:53:03 crc kubenswrapper[4792]: E0929 19:53:03.016439 4792 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:53:03 crc kubenswrapper[4792]: I0929 19:53:03.247968 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:53:03 crc kubenswrapper[4792]: I0929 19:53:03.311431 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cldvf"] Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.192369 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cldvf" podUID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerName="registry-server" containerID="cri-o://47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a" gracePeriod=2 Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.848008 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.865200 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-utilities\") pod \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.865257 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvvdm\" (UniqueName: \"kubernetes.io/projected/f75f0a93-8867-45b8-8a7a-c4c45383fd98-kube-api-access-tvvdm\") pod \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.865285 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-catalog-content\") pod \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\" (UID: \"f75f0a93-8867-45b8-8a7a-c4c45383fd98\") " Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.866287 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-utilities" (OuterVolumeSpecName: "utilities") pod "f75f0a93-8867-45b8-8a7a-c4c45383fd98" (UID: "f75f0a93-8867-45b8-8a7a-c4c45383fd98"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.873355 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f75f0a93-8867-45b8-8a7a-c4c45383fd98-kube-api-access-tvvdm" (OuterVolumeSpecName: "kube-api-access-tvvdm") pod "f75f0a93-8867-45b8-8a7a-c4c45383fd98" (UID: "f75f0a93-8867-45b8-8a7a-c4c45383fd98"). InnerVolumeSpecName "kube-api-access-tvvdm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.951520 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f75f0a93-8867-45b8-8a7a-c4c45383fd98" (UID: "f75f0a93-8867-45b8-8a7a-c4c45383fd98"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.967239 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.967272 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvvdm\" (UniqueName: \"kubernetes.io/projected/f75f0a93-8867-45b8-8a7a-c4c45383fd98-kube-api-access-tvvdm\") on node \"crc\" DevicePath \"\"" Sep 29 19:53:05 crc kubenswrapper[4792]: I0929 19:53:05.967283 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f75f0a93-8867-45b8-8a7a-c4c45383fd98-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.214649 4792 generic.go:334] "Generic (PLEG): container finished" podID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerID="47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a" exitCode=0 Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.214700 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cldvf" event={"ID":"f75f0a93-8867-45b8-8a7a-c4c45383fd98","Type":"ContainerDied","Data":"47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a"} Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.214729 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cldvf" event={"ID":"f75f0a93-8867-45b8-8a7a-c4c45383fd98","Type":"ContainerDied","Data":"d143d5b8e9c8d1041edcf1a0d8a527fa747a921819c80746767b8f9f48fbed27"} Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.214750 4792 scope.go:117] "RemoveContainer" containerID="47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a" Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.214925 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cldvf" Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.260726 4792 scope.go:117] "RemoveContainer" containerID="de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10" Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.261918 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cldvf"] Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.276315 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cldvf"] Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.296640 4792 scope.go:117] "RemoveContainer" containerID="6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c" Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.386168 4792 scope.go:117] "RemoveContainer" containerID="47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a" Sep 29 19:53:06 crc kubenswrapper[4792]: E0929 19:53:06.386901 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a\": container with ID starting with 47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a not found: ID does not exist" containerID="47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a" Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.386941 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a"} err="failed to get container status \"47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a\": rpc error: code = NotFound desc = could not find container \"47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a\": container with ID starting with 47c54472d5c8fc681892d5dfd4cf1585c9d4593f52e311409bd0fbbaef195c4a not found: ID does not exist" Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.386972 4792 scope.go:117] "RemoveContainer" containerID="de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10" Sep 29 19:53:06 crc kubenswrapper[4792]: E0929 19:53:06.387329 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10\": container with ID starting with de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10 not found: ID does not exist" containerID="de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10" Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.387371 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10"} err="failed to get container status \"de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10\": rpc error: code = NotFound desc = could not find container \"de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10\": container with ID starting with de2342a5ba01db89fcf6e9f4cafb6a7be7509c691b4dd01cb6f39f1553cbfa10 not found: ID does not exist" Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.387398 4792 scope.go:117] "RemoveContainer" containerID="6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c" Sep 29 19:53:06 crc kubenswrapper[4792]: E0929 19:53:06.387661 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c\": container with ID starting with 6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c not found: ID does not exist" containerID="6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c" Sep 29 19:53:06 crc kubenswrapper[4792]: I0929 19:53:06.387689 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c"} err="failed to get container status \"6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c\": rpc error: code = NotFound desc = could not find container \"6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c\": container with ID starting with 6b2be73a4e50850e1c67030fc946c19384c7576b460403b29779be91d357c98c not found: ID does not exist" Sep 29 19:53:07 crc kubenswrapper[4792]: I0929 19:53:07.026682 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" path="/var/lib/kubelet/pods/f75f0a93-8867-45b8-8a7a-c4c45383fd98/volumes" Sep 29 19:53:14 crc kubenswrapper[4792]: I0929 19:53:14.015879 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:53:14 crc kubenswrapper[4792]: E0929 19:53:14.016588 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:53:28 crc kubenswrapper[4792]: I0929 19:53:28.015773 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:53:28 crc kubenswrapper[4792]: E0929 19:53:28.016516 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 19:53:42 crc kubenswrapper[4792]: I0929 19:53:42.015092 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:53:42 crc kubenswrapper[4792]: I0929 19:53:42.552272 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"9aa3c4a784136c52348a1844c48c7382e91e1b68cf942188a07e1a234d234270"} Sep 29 19:56:11 crc kubenswrapper[4792]: I0929 19:56:11.959744 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:56:11 crc kubenswrapper[4792]: I0929 19:56:11.960373 4792 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:56:41 crc kubenswrapper[4792]: I0929 19:56:41.959658 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:56:41 crc kubenswrapper[4792]: I0929 19:56:41.960233 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:57:11 crc kubenswrapper[4792]: I0929 19:57:11.959358 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:57:11 crc kubenswrapper[4792]: I0929 19:57:11.959897 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:57:11 crc kubenswrapper[4792]: I0929 19:57:11.959940 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 19:57:11 crc kubenswrapper[4792]: I0929 19:57:11.960589 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9aa3c4a784136c52348a1844c48c7382e91e1b68cf942188a07e1a234d234270"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:57:11 crc kubenswrapper[4792]: I0929 19:57:11.961456 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://9aa3c4a784136c52348a1844c48c7382e91e1b68cf942188a07e1a234d234270" gracePeriod=600 Sep 29 19:57:12 crc kubenswrapper[4792]: I0929 19:57:12.454224 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="9aa3c4a784136c52348a1844c48c7382e91e1b68cf942188a07e1a234d234270" exitCode=0 Sep 29 19:57:12 crc kubenswrapper[4792]: I0929 19:57:12.454276 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"9aa3c4a784136c52348a1844c48c7382e91e1b68cf942188a07e1a234d234270"} Sep 29 19:57:12 crc kubenswrapper[4792]: I0929 19:57:12.454309 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c"} Sep 29 19:57:12 crc kubenswrapper[4792]: I0929 19:57:12.454330 4792 scope.go:117] "RemoveContainer" containerID="b15932957497b7a32079aab059d55a779c760572378662950e68542536286e36" Sep 29 19:58:13 crc kubenswrapper[4792]: I0929 19:58:13.998627 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tjwlj"] Sep 29 19:58:14 crc kubenswrapper[4792]: E0929 19:58:13.999471 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerName="registry-server" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:13.999484 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerName="registry-server" Sep 29 19:58:14 crc kubenswrapper[4792]: E0929 19:58:13.999493 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerName="extract-utilities" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:13.999499 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerName="extract-utilities" Sep 29 19:58:14 crc kubenswrapper[4792]: E0929 19:58:13.999520 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerName="extract-content" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:13.999525 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerName="extract-content" Sep 29 19:58:14 crc kubenswrapper[4792]: E0929 19:58:13.999542 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerName="extract-utilities" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:13.999548 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerName="extract-utilities" Sep 29 19:58:14 crc kubenswrapper[4792]: E0929 19:58:13.999557 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerName="registry-server" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:13.999562 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerName="registry-server" Sep 29 19:58:14 crc kubenswrapper[4792]: E0929 19:58:13.999576 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerName="extract-content" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:13.999581 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerName="extract-content" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:13.999753 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f75f0a93-8867-45b8-8a7a-c4c45383fd98" containerName="registry-server" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:13.999767 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaac4008-c518-43e7-b49d-03b6adabd8f7" containerName="registry-server" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.001117 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.017340 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tjwlj"] Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.070282 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-catalog-content\") pod \"redhat-operators-tjwlj\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.070368 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-utilities\") pod \"redhat-operators-tjwlj\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.070967 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrdb4\" (UniqueName: \"kubernetes.io/projected/152fa569-48db-46f9-964a-99ea888e86ed-kube-api-access-jrdb4\") pod \"redhat-operators-tjwlj\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.172535 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrdb4\" (UniqueName: \"kubernetes.io/projected/152fa569-48db-46f9-964a-99ea888e86ed-kube-api-access-jrdb4\") pod \"redhat-operators-tjwlj\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.173040 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-catalog-content\") pod \"redhat-operators-tjwlj\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.173489 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-catalog-content\") pod \"redhat-operators-tjwlj\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.174000 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-utilities\") pod \"redhat-operators-tjwlj\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.174517 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-utilities\") pod \"redhat-operators-tjwlj\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.195173 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jrdb4\" (UniqueName: \"kubernetes.io/projected/152fa569-48db-46f9-964a-99ea888e86ed-kube-api-access-jrdb4\") pod \"redhat-operators-tjwlj\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.318879 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.798819 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tjwlj"] Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.993954 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjwlj" event={"ID":"152fa569-48db-46f9-964a-99ea888e86ed","Type":"ContainerStarted","Data":"11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026"} Sep 29 19:58:14 crc kubenswrapper[4792]: I0929 19:58:14.994261 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjwlj" event={"ID":"152fa569-48db-46f9-964a-99ea888e86ed","Type":"ContainerStarted","Data":"f06f3ff7fa77a72a312903392991e701cd12b5983d1fbc14012eaba48f08df00"} Sep 29 19:58:16 crc kubenswrapper[4792]: I0929 19:58:16.003918 4792 generic.go:334] "Generic (PLEG): container finished" podID="152fa569-48db-46f9-964a-99ea888e86ed" containerID="11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026" exitCode=0 Sep 29 19:58:16 crc kubenswrapper[4792]: I0929 19:58:16.003975 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjwlj" event={"ID":"152fa569-48db-46f9-964a-99ea888e86ed","Type":"ContainerDied","Data":"11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026"} Sep 29 19:58:16 crc kubenswrapper[4792]: I0929 19:58:16.006746 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:58:17 crc kubenswrapper[4792]: I0929 19:58:17.027892 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjwlj" event={"ID":"152fa569-48db-46f9-964a-99ea888e86ed","Type":"ContainerStarted","Data":"8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476"} Sep 29 19:58:22 crc kubenswrapper[4792]: I0929 19:58:22.057603 4792 generic.go:334] "Generic (PLEG): container finished" podID="152fa569-48db-46f9-964a-99ea888e86ed" containerID="8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476" exitCode=0 Sep 29 19:58:22 crc kubenswrapper[4792]: I0929 19:58:22.057677 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjwlj" event={"ID":"152fa569-48db-46f9-964a-99ea888e86ed","Type":"ContainerDied","Data":"8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476"} Sep 29 19:58:23 crc kubenswrapper[4792]: I0929 19:58:23.070457 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjwlj" event={"ID":"152fa569-48db-46f9-964a-99ea888e86ed","Type":"ContainerStarted","Data":"2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf"} Sep 29 19:58:23 crc kubenswrapper[4792]: I0929 19:58:23.086625 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tjwlj" podStartSLOduration=3.47336903 podStartE2EDuration="10.086605887s" podCreationTimestamp="2025-09-29 19:58:13 +0000 UTC" firstStartedPulling="2025-09-29 
19:58:16.006416172 +0000 UTC m=+3707.999723568" lastFinishedPulling="2025-09-29 19:58:22.619653029 +0000 UTC m=+3714.612960425" observedRunningTime="2025-09-29 19:58:23.085750455 +0000 UTC m=+3715.079057871" watchObservedRunningTime="2025-09-29 19:58:23.086605887 +0000 UTC m=+3715.079913273" Sep 29 19:58:24 crc kubenswrapper[4792]: I0929 19:58:24.319735 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:24 crc kubenswrapper[4792]: I0929 19:58:24.319795 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:25 crc kubenswrapper[4792]: I0929 19:58:25.363682 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tjwlj" podUID="152fa569-48db-46f9-964a-99ea888e86ed" containerName="registry-server" probeResult="failure" output=< Sep 29 19:58:25 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Sep 29 19:58:25 crc kubenswrapper[4792]: > Sep 29 19:58:35 crc kubenswrapper[4792]: I0929 19:58:35.360947 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tjwlj" podUID="152fa569-48db-46f9-964a-99ea888e86ed" containerName="registry-server" probeResult="failure" output=< Sep 29 19:58:35 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Sep 29 19:58:35 crc kubenswrapper[4792]: > Sep 29 19:58:44 crc kubenswrapper[4792]: I0929 19:58:44.370411 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:44 crc kubenswrapper[4792]: I0929 19:58:44.416208 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:45 crc kubenswrapper[4792]: I0929 19:58:45.206125 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tjwlj"] Sep 29 19:58:46 crc kubenswrapper[4792]: I0929 19:58:46.273787 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tjwlj" podUID="152fa569-48db-46f9-964a-99ea888e86ed" containerName="registry-server" containerID="cri-o://2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf" gracePeriod=2 Sep 29 19:58:46 crc kubenswrapper[4792]: I0929 19:58:46.988482 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.024187 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-catalog-content\") pod \"152fa569-48db-46f9-964a-99ea888e86ed\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.024353 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrdb4\" (UniqueName: \"kubernetes.io/projected/152fa569-48db-46f9-964a-99ea888e86ed-kube-api-access-jrdb4\") pod \"152fa569-48db-46f9-964a-99ea888e86ed\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.024384 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-utilities\") pod \"152fa569-48db-46f9-964a-99ea888e86ed\" (UID: \"152fa569-48db-46f9-964a-99ea888e86ed\") " Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.026706 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-utilities" (OuterVolumeSpecName: "utilities") pod "152fa569-48db-46f9-964a-99ea888e86ed" (UID: "152fa569-48db-46f9-964a-99ea888e86ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.097620 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/152fa569-48db-46f9-964a-99ea888e86ed-kube-api-access-jrdb4" (OuterVolumeSpecName: "kube-api-access-jrdb4") pod "152fa569-48db-46f9-964a-99ea888e86ed" (UID: "152fa569-48db-46f9-964a-99ea888e86ed"). InnerVolumeSpecName "kube-api-access-jrdb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.127477 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrdb4\" (UniqueName: \"kubernetes.io/projected/152fa569-48db-46f9-964a-99ea888e86ed-kube-api-access-jrdb4\") on node \"crc\" DevicePath \"\"" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.127502 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.154581 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "152fa569-48db-46f9-964a-99ea888e86ed" (UID: "152fa569-48db-46f9-964a-99ea888e86ed"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.228776 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152fa569-48db-46f9-964a-99ea888e86ed-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.294552 4792 generic.go:334] "Generic (PLEG): container finished" podID="152fa569-48db-46f9-964a-99ea888e86ed" containerID="2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf" exitCode=0 Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.294592 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjwlj" event={"ID":"152fa569-48db-46f9-964a-99ea888e86ed","Type":"ContainerDied","Data":"2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf"} Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.294625 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjwlj" event={"ID":"152fa569-48db-46f9-964a-99ea888e86ed","Type":"ContainerDied","Data":"f06f3ff7fa77a72a312903392991e701cd12b5983d1fbc14012eaba48f08df00"} Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.294633 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjwlj" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.294647 4792 scope.go:117] "RemoveContainer" containerID="2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.325959 4792 scope.go:117] "RemoveContainer" containerID="8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.352288 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tjwlj"] Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.359938 4792 scope.go:117] "RemoveContainer" containerID="11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.379148 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tjwlj"] Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.406627 4792 scope.go:117] "RemoveContainer" containerID="2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf" Sep 29 19:58:47 crc kubenswrapper[4792]: E0929 19:58:47.407211 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf\": container with ID starting with 2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf not found: ID does not exist" containerID="2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.407349 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf"} err="failed to get container status \"2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf\": rpc error: code = NotFound desc = could not find container \"2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf\": container with ID starting with 2b5dd0d8e1c9407b2128e387315eb4ff7cf9306e4d1225323607f08f37bceaaf not found: ID does not exist" Sep 29 19:58:47 crc 
kubenswrapper[4792]: I0929 19:58:47.407462 4792 scope.go:117] "RemoveContainer" containerID="8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476" Sep 29 19:58:47 crc kubenswrapper[4792]: E0929 19:58:47.407945 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476\": container with ID starting with 8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476 not found: ID does not exist" containerID="8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.407975 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476"} err="failed to get container status \"8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476\": rpc error: code = NotFound desc = could not find container \"8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476\": container with ID starting with 8e2108fe4503d7e407ca497e255167300b57c00f6e10fc7b94dd7481f66e4476 not found: ID does not exist" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.407997 4792 scope.go:117] "RemoveContainer" containerID="11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026" Sep 29 19:58:47 crc kubenswrapper[4792]: E0929 19:58:47.408440 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026\": container with ID starting with 11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026 not found: ID does not exist" containerID="11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026" Sep 29 19:58:47 crc kubenswrapper[4792]: I0929 19:58:47.408463 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026"} err="failed to get container status \"11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026\": rpc error: code = NotFound desc = could not find container \"11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026\": container with ID starting with 11edc3c281013e3af8091bd41a2bc4c5413861efe05d627251f688709e5cc026 not found: ID does not exist" Sep 29 19:58:49 crc kubenswrapper[4792]: I0929 19:58:49.025826 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="152fa569-48db-46f9-964a-99ea888e86ed" path="/var/lib/kubelet/pods/152fa569-48db-46f9-964a-99ea888e86ed/volumes" Sep 29 19:59:41 crc kubenswrapper[4792]: I0929 19:59:41.960024 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:59:41 crc kubenswrapper[4792]: I0929 19:59:41.960679 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.159195 4792 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m"] Sep 29 20:00:00 crc kubenswrapper[4792]: E0929 20:00:00.160191 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152fa569-48db-46f9-964a-99ea888e86ed" containerName="extract-content" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.160204 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="152fa569-48db-46f9-964a-99ea888e86ed" containerName="extract-content" Sep 29 20:00:00 crc kubenswrapper[4792]: E0929 20:00:00.160284 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152fa569-48db-46f9-964a-99ea888e86ed" containerName="registry-server" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.160293 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="152fa569-48db-46f9-964a-99ea888e86ed" containerName="registry-server" Sep 29 20:00:00 crc kubenswrapper[4792]: E0929 20:00:00.160333 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152fa569-48db-46f9-964a-99ea888e86ed" containerName="extract-utilities" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.160341 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="152fa569-48db-46f9-964a-99ea888e86ed" containerName="extract-utilities" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.160946 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="152fa569-48db-46f9-964a-99ea888e86ed" containerName="registry-server" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.161610 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.166545 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.166587 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.174346 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m"] Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.245900 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-secret-volume\") pod \"collect-profiles-29319600-pmk7m\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.245968 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-config-volume\") pod \"collect-profiles-29319600-pmk7m\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.245991 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcnf2\" (UniqueName: \"kubernetes.io/projected/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-kube-api-access-xcnf2\") pod \"collect-profiles-29319600-pmk7m\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.348527 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-secret-volume\") pod \"collect-profiles-29319600-pmk7m\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.348890 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-config-volume\") pod \"collect-profiles-29319600-pmk7m\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.348919 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcnf2\" (UniqueName: \"kubernetes.io/projected/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-kube-api-access-xcnf2\") pod \"collect-profiles-29319600-pmk7m\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.349829 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-config-volume\") pod \"collect-profiles-29319600-pmk7m\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.358950 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-secret-volume\") pod \"collect-profiles-29319600-pmk7m\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.377627 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcnf2\" (UniqueName: \"kubernetes.io/projected/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-kube-api-access-xcnf2\") pod \"collect-profiles-29319600-pmk7m\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.496784 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:00 crc kubenswrapper[4792]: I0929 20:00:00.982179 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m"] Sep 29 20:00:00 crc kubenswrapper[4792]: W0929 20:00:00.997038 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64e4d685_3403_4ffb_8c10_ba53ff2aa0a0.slice/crio-c3c640850126a855dfe05b9f0eb5ed07f3e8f7792a1e9d8a73520a4eda07ffff WatchSource:0}: Error finding container c3c640850126a855dfe05b9f0eb5ed07f3e8f7792a1e9d8a73520a4eda07ffff: Status 404 returned error can't find the container with id c3c640850126a855dfe05b9f0eb5ed07f3e8f7792a1e9d8a73520a4eda07ffff Sep 29 20:00:01 crc kubenswrapper[4792]: I0929 20:00:01.935112 4792 generic.go:334] "Generic (PLEG): container finished" podID="64e4d685-3403-4ffb-8c10-ba53ff2aa0a0" containerID="d2fc3f80b0676889912ff829f5152d6347aad6ee63d080f7a576541c16435872" exitCode=0 Sep 29 20:00:01 crc kubenswrapper[4792]: I0929 20:00:01.935341 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" event={"ID":"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0","Type":"ContainerDied","Data":"d2fc3f80b0676889912ff829f5152d6347aad6ee63d080f7a576541c16435872"} Sep 29 20:00:01 crc kubenswrapper[4792]: I0929 20:00:01.935393 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" event={"ID":"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0","Type":"ContainerStarted","Data":"c3c640850126a855dfe05b9f0eb5ed07f3e8f7792a1e9d8a73520a4eda07ffff"} Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.463108 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.505080 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-config-volume\") pod \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.505232 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-secret-volume\") pod \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.505276 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcnf2\" (UniqueName: \"kubernetes.io/projected/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-kube-api-access-xcnf2\") pod \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\" (UID: \"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0\") " Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.506065 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-config-volume" (OuterVolumeSpecName: "config-volume") pod "64e4d685-3403-4ffb-8c10-ba53ff2aa0a0" (UID: "64e4d685-3403-4ffb-8c10-ba53ff2aa0a0"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.512549 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-kube-api-access-xcnf2" (OuterVolumeSpecName: "kube-api-access-xcnf2") pod "64e4d685-3403-4ffb-8c10-ba53ff2aa0a0" (UID: "64e4d685-3403-4ffb-8c10-ba53ff2aa0a0"). InnerVolumeSpecName "kube-api-access-xcnf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.512830 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "64e4d685-3403-4ffb-8c10-ba53ff2aa0a0" (UID: "64e4d685-3403-4ffb-8c10-ba53ff2aa0a0"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.608357 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.608712 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.608729 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcnf2\" (UniqueName: \"kubernetes.io/projected/64e4d685-3403-4ffb-8c10-ba53ff2aa0a0-kube-api-access-xcnf2\") on node \"crc\" DevicePath \"\"" Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.959940 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" event={"ID":"64e4d685-3403-4ffb-8c10-ba53ff2aa0a0","Type":"ContainerDied","Data":"c3c640850126a855dfe05b9f0eb5ed07f3e8f7792a1e9d8a73520a4eda07ffff"} Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.960005 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-pmk7m" Sep 29 20:00:03 crc kubenswrapper[4792]: I0929 20:00:03.959984 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3c640850126a855dfe05b9f0eb5ed07f3e8f7792a1e9d8a73520a4eda07ffff" Sep 29 20:00:04 crc kubenswrapper[4792]: I0929 20:00:04.545693 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb"] Sep 29 20:00:04 crc kubenswrapper[4792]: I0929 20:00:04.553329 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319555-29ttb"] Sep 29 20:00:05 crc kubenswrapper[4792]: I0929 20:00:05.029063 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba543e31-cf1d-49bb-8097-ed5d69bc0e2c" path="/var/lib/kubelet/pods/ba543e31-cf1d-49bb-8097-ed5d69bc0e2c/volumes" Sep 29 20:00:11 crc kubenswrapper[4792]: I0929 20:00:11.959528 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:00:11 crc kubenswrapper[4792]: I0929 20:00:11.960064 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:00:38 crc kubenswrapper[4792]: I0929 20:00:38.625193 4792 scope.go:117] "RemoveContainer" containerID="eec6e0b1a6d04da1ac0d34a22fdf6c655c3f300b62718c0a019d8c1e79aacdef" Sep 29 20:00:41 crc kubenswrapper[4792]: I0929 20:00:41.960055 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:00:41 crc kubenswrapper[4792]: I0929 20:00:41.960625 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:00:41 crc kubenswrapper[4792]: I0929 20:00:41.960670 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 20:00:41 crc kubenswrapper[4792]: I0929 20:00:41.961426 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 20:00:41 crc kubenswrapper[4792]: I0929 20:00:41.961472 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" 
containerName="machine-config-daemon" containerID="cri-o://6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" gracePeriod=600 Sep 29 20:00:42 crc kubenswrapper[4792]: E0929 20:00:42.084694 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:00:42 crc kubenswrapper[4792]: I0929 20:00:42.336408 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" exitCode=0 Sep 29 20:00:42 crc kubenswrapper[4792]: I0929 20:00:42.336463 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c"} Sep 29 20:00:42 crc kubenswrapper[4792]: I0929 20:00:42.337611 4792 scope.go:117] "RemoveContainer" containerID="9aa3c4a784136c52348a1844c48c7382e91e1b68cf942188a07e1a234d234270" Sep 29 20:00:42 crc kubenswrapper[4792]: I0929 20:00:42.337744 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:00:42 crc kubenswrapper[4792]: E0929 20:00:42.338153 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:00:55 crc kubenswrapper[4792]: I0929 20:00:55.015444 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:00:55 crc kubenswrapper[4792]: E0929 20:00:55.016442 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.146031 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319601-w5m7k"] Sep 29 20:01:00 crc kubenswrapper[4792]: E0929 20:01:00.148446 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e4d685-3403-4ffb-8c10-ba53ff2aa0a0" containerName="collect-profiles" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.148561 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e4d685-3403-4ffb-8c10-ba53ff2aa0a0" containerName="collect-profiles" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.148961 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e4d685-3403-4ffb-8c10-ba53ff2aa0a0" containerName="collect-profiles" Sep 29 20:01:00 crc kubenswrapper[4792]: 
I0929 20:01:00.149796 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.160793 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319601-w5m7k"] Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.279116 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdgn9\" (UniqueName: \"kubernetes.io/projected/6885051d-f7a5-4076-a670-778fbd8d23ca-kube-api-access-mdgn9\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.279172 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-fernet-keys\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.279325 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-combined-ca-bundle\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.279544 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-config-data\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.381692 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-combined-ca-bundle\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.381807 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-config-data\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.381980 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdgn9\" (UniqueName: \"kubernetes.io/projected/6885051d-f7a5-4076-a670-778fbd8d23ca-kube-api-access-mdgn9\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.382011 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-fernet-keys\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 
20:01:00.389419 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-config-data\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.389880 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-fernet-keys\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.397386 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-combined-ca-bundle\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.407131 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdgn9\" (UniqueName: \"kubernetes.io/projected/6885051d-f7a5-4076-a670-778fbd8d23ca-kube-api-access-mdgn9\") pod \"keystone-cron-29319601-w5m7k\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.465968 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:00 crc kubenswrapper[4792]: I0929 20:01:00.910274 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319601-w5m7k"] Sep 29 20:01:01 crc kubenswrapper[4792]: I0929 20:01:01.497356 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319601-w5m7k" event={"ID":"6885051d-f7a5-4076-a670-778fbd8d23ca","Type":"ContainerStarted","Data":"9770af2c0203297cd87c72912679c0fa044bca1867377087d72a92825d09dcf2"} Sep 29 20:01:01 crc kubenswrapper[4792]: I0929 20:01:01.497563 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319601-w5m7k" event={"ID":"6885051d-f7a5-4076-a670-778fbd8d23ca","Type":"ContainerStarted","Data":"d5cdadc82c1ba357efc326e7f0411620c5b0212c6a181b79e46b1f6c84b7d3a8"} Sep 29 20:01:01 crc kubenswrapper[4792]: I0929 20:01:01.567958 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319601-w5m7k" podStartSLOduration=1.5679419110000001 podStartE2EDuration="1.567941911s" podCreationTimestamp="2025-09-29 20:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 20:01:01.565045366 +0000 UTC m=+3873.558352762" watchObservedRunningTime="2025-09-29 20:01:01.567941911 +0000 UTC m=+3873.561249307" Sep 29 20:01:04 crc kubenswrapper[4792]: I0929 20:01:04.523166 4792 generic.go:334] "Generic (PLEG): container finished" podID="6885051d-f7a5-4076-a670-778fbd8d23ca" containerID="9770af2c0203297cd87c72912679c0fa044bca1867377087d72a92825d09dcf2" exitCode=0 Sep 29 20:01:04 crc kubenswrapper[4792]: I0929 20:01:04.523259 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319601-w5m7k" 
event={"ID":"6885051d-f7a5-4076-a670-778fbd8d23ca","Type":"ContainerDied","Data":"9770af2c0203297cd87c72912679c0fa044bca1867377087d72a92825d09dcf2"} Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.015135 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.084297 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-fernet-keys\") pod \"6885051d-f7a5-4076-a670-778fbd8d23ca\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.084397 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdgn9\" (UniqueName: \"kubernetes.io/projected/6885051d-f7a5-4076-a670-778fbd8d23ca-kube-api-access-mdgn9\") pod \"6885051d-f7a5-4076-a670-778fbd8d23ca\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.084593 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-config-data\") pod \"6885051d-f7a5-4076-a670-778fbd8d23ca\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.084873 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-combined-ca-bundle\") pod \"6885051d-f7a5-4076-a670-778fbd8d23ca\" (UID: \"6885051d-f7a5-4076-a670-778fbd8d23ca\") " Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.107202 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6885051d-f7a5-4076-a670-778fbd8d23ca" (UID: "6885051d-f7a5-4076-a670-778fbd8d23ca"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.107909 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6885051d-f7a5-4076-a670-778fbd8d23ca-kube-api-access-mdgn9" (OuterVolumeSpecName: "kube-api-access-mdgn9") pod "6885051d-f7a5-4076-a670-778fbd8d23ca" (UID: "6885051d-f7a5-4076-a670-778fbd8d23ca"). InnerVolumeSpecName "kube-api-access-mdgn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.145320 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6885051d-f7a5-4076-a670-778fbd8d23ca" (UID: "6885051d-f7a5-4076-a670-778fbd8d23ca"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.186500 4792 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.186620 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdgn9\" (UniqueName: \"kubernetes.io/projected/6885051d-f7a5-4076-a670-778fbd8d23ca-kube-api-access-mdgn9\") on node \"crc\" DevicePath \"\"" Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.186656 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.216977 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-config-data" (OuterVolumeSpecName: "config-data") pod "6885051d-f7a5-4076-a670-778fbd8d23ca" (UID: "6885051d-f7a5-4076-a670-778fbd8d23ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.291033 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6885051d-f7a5-4076-a670-778fbd8d23ca-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.539272 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319601-w5m7k" event={"ID":"6885051d-f7a5-4076-a670-778fbd8d23ca","Type":"ContainerDied","Data":"d5cdadc82c1ba357efc326e7f0411620c5b0212c6a181b79e46b1f6c84b7d3a8"} Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.539318 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5cdadc82c1ba357efc326e7f0411620c5b0212c6a181b79e46b1f6c84b7d3a8" Sep 29 20:01:06 crc kubenswrapper[4792]: I0929 20:01:06.539321 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319601-w5m7k" Sep 29 20:01:07 crc kubenswrapper[4792]: I0929 20:01:07.016556 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:01:07 crc kubenswrapper[4792]: E0929 20:01:07.016769 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.015527 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:01:18 crc kubenswrapper[4792]: E0929 20:01:18.016420 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.080619 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n2pvj"] Sep 29 20:01:18 crc kubenswrapper[4792]: E0929 20:01:18.081056 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6885051d-f7a5-4076-a670-778fbd8d23ca" containerName="keystone-cron" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.081077 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6885051d-f7a5-4076-a670-778fbd8d23ca" containerName="keystone-cron" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.081319 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6885051d-f7a5-4076-a670-778fbd8d23ca" containerName="keystone-cron" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.082898 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.104866 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n2pvj"] Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.208816 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-utilities\") pod \"community-operators-n2pvj\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.209142 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-catalog-content\") pod \"community-operators-n2pvj\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.209276 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc4vk\" (UniqueName: \"kubernetes.io/projected/8a00e222-e33a-4484-b24c-8373cbec3fce-kube-api-access-kc4vk\") pod \"community-operators-n2pvj\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.311316 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc4vk\" (UniqueName: \"kubernetes.io/projected/8a00e222-e33a-4484-b24c-8373cbec3fce-kube-api-access-kc4vk\") pod \"community-operators-n2pvj\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.311411 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-utilities\") pod \"community-operators-n2pvj\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.311458 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-catalog-content\") pod \"community-operators-n2pvj\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.312178 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-catalog-content\") pod \"community-operators-n2pvj\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.312709 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-utilities\") pod \"community-operators-n2pvj\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.329451 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kc4vk\" (UniqueName: \"kubernetes.io/projected/8a00e222-e33a-4484-b24c-8373cbec3fce-kube-api-access-kc4vk\") pod \"community-operators-n2pvj\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:18 crc kubenswrapper[4792]: I0929 20:01:18.407151 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:19 crc kubenswrapper[4792]: I0929 20:01:19.002420 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n2pvj"] Sep 29 20:01:19 crc kubenswrapper[4792]: I0929 20:01:19.682926 4792 generic.go:334] "Generic (PLEG): container finished" podID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerID="0124d37c95705d130548fc7385b55f038ac986d46bac2121797f0c3b2db108f6" exitCode=0 Sep 29 20:01:19 crc kubenswrapper[4792]: I0929 20:01:19.683206 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2pvj" event={"ID":"8a00e222-e33a-4484-b24c-8373cbec3fce","Type":"ContainerDied","Data":"0124d37c95705d130548fc7385b55f038ac986d46bac2121797f0c3b2db108f6"} Sep 29 20:01:19 crc kubenswrapper[4792]: I0929 20:01:19.683245 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2pvj" event={"ID":"8a00e222-e33a-4484-b24c-8373cbec3fce","Type":"ContainerStarted","Data":"02b24a3d141819bf6c22bf1de918abb118678ca47fc4d0e06d9f794f337fe774"} Sep 29 20:01:20 crc kubenswrapper[4792]: I0929 20:01:20.692999 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2pvj" event={"ID":"8a00e222-e33a-4484-b24c-8373cbec3fce","Type":"ContainerStarted","Data":"d65ed3c93f719f0d57d1b5d54980efa4ec618a72e51b94e02c2da350ae6704cc"} Sep 29 20:01:22 crc kubenswrapper[4792]: I0929 20:01:22.709167 4792 generic.go:334] "Generic (PLEG): container finished" podID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerID="d65ed3c93f719f0d57d1b5d54980efa4ec618a72e51b94e02c2da350ae6704cc" exitCode=0 Sep 29 20:01:22 crc kubenswrapper[4792]: I0929 20:01:22.709240 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2pvj" event={"ID":"8a00e222-e33a-4484-b24c-8373cbec3fce","Type":"ContainerDied","Data":"d65ed3c93f719f0d57d1b5d54980efa4ec618a72e51b94e02c2da350ae6704cc"} Sep 29 20:01:23 crc kubenswrapper[4792]: I0929 20:01:23.720761 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2pvj" event={"ID":"8a00e222-e33a-4484-b24c-8373cbec3fce","Type":"ContainerStarted","Data":"25232f36380f62a6de4cd1801407c509944cc35a14d74d8ffe8e1111cec47941"} Sep 29 20:01:23 crc kubenswrapper[4792]: I0929 20:01:23.743966 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n2pvj" podStartSLOduration=2.23089566 podStartE2EDuration="5.743942664s" podCreationTimestamp="2025-09-29 20:01:18 +0000 UTC" firstStartedPulling="2025-09-29 20:01:19.685098577 +0000 UTC m=+3891.678406003" lastFinishedPulling="2025-09-29 20:01:23.198145611 +0000 UTC m=+3895.191453007" observedRunningTime="2025-09-29 20:01:23.741464319 +0000 UTC m=+3895.734771735" watchObservedRunningTime="2025-09-29 20:01:23.743942664 +0000 UTC m=+3895.737250070" Sep 29 20:01:28 crc kubenswrapper[4792]: I0929 20:01:28.434174 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:28 crc kubenswrapper[4792]: I0929 20:01:28.435694 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:28 crc kubenswrapper[4792]: I0929 20:01:28.490296 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:28 crc kubenswrapper[4792]: I0929 20:01:28.849283 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:28 crc kubenswrapper[4792]: I0929 20:01:28.905181 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n2pvj"] Sep 29 20:01:30 crc kubenswrapper[4792]: I0929 20:01:30.784782 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n2pvj" podUID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerName="registry-server" containerID="cri-o://25232f36380f62a6de4cd1801407c509944cc35a14d74d8ffe8e1111cec47941" gracePeriod=2 Sep 29 20:01:31 crc kubenswrapper[4792]: I0929 20:01:31.016091 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:01:31 crc kubenswrapper[4792]: E0929 20:01:31.016449 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:01:31 crc kubenswrapper[4792]: I0929 20:01:31.808514 4792 generic.go:334] "Generic (PLEG): container finished" podID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerID="25232f36380f62a6de4cd1801407c509944cc35a14d74d8ffe8e1111cec47941" exitCode=0 Sep 29 20:01:31 crc kubenswrapper[4792]: I0929 20:01:31.808657 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2pvj" event={"ID":"8a00e222-e33a-4484-b24c-8373cbec3fce","Type":"ContainerDied","Data":"25232f36380f62a6de4cd1801407c509944cc35a14d74d8ffe8e1111cec47941"} Sep 29 20:01:31 crc kubenswrapper[4792]: I0929 20:01:31.979048 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.115750 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-utilities\") pod \"8a00e222-e33a-4484-b24c-8373cbec3fce\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.115928 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kc4vk\" (UniqueName: \"kubernetes.io/projected/8a00e222-e33a-4484-b24c-8373cbec3fce-kube-api-access-kc4vk\") pod \"8a00e222-e33a-4484-b24c-8373cbec3fce\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.115956 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-catalog-content\") pod \"8a00e222-e33a-4484-b24c-8373cbec3fce\" (UID: \"8a00e222-e33a-4484-b24c-8373cbec3fce\") " Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.117025 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-utilities" (OuterVolumeSpecName: "utilities") pod "8a00e222-e33a-4484-b24c-8373cbec3fce" (UID: "8a00e222-e33a-4484-b24c-8373cbec3fce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.123131 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a00e222-e33a-4484-b24c-8373cbec3fce-kube-api-access-kc4vk" (OuterVolumeSpecName: "kube-api-access-kc4vk") pod "8a00e222-e33a-4484-b24c-8373cbec3fce" (UID: "8a00e222-e33a-4484-b24c-8373cbec3fce"). InnerVolumeSpecName "kube-api-access-kc4vk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.162098 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8a00e222-e33a-4484-b24c-8373cbec3fce" (UID: "8a00e222-e33a-4484-b24c-8373cbec3fce"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.217817 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kc4vk\" (UniqueName: \"kubernetes.io/projected/8a00e222-e33a-4484-b24c-8373cbec3fce-kube-api-access-kc4vk\") on node \"crc\" DevicePath \"\"" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.217875 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.217890 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a00e222-e33a-4484-b24c-8373cbec3fce-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.824748 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2pvj" event={"ID":"8a00e222-e33a-4484-b24c-8373cbec3fce","Type":"ContainerDied","Data":"02b24a3d141819bf6c22bf1de918abb118678ca47fc4d0e06d9f794f337fe774"} Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.824808 4792 scope.go:117] "RemoveContainer" containerID="25232f36380f62a6de4cd1801407c509944cc35a14d74d8ffe8e1111cec47941" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.824840 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n2pvj" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.846055 4792 scope.go:117] "RemoveContainer" containerID="d65ed3c93f719f0d57d1b5d54980efa4ec618a72e51b94e02c2da350ae6704cc" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.889078 4792 scope.go:117] "RemoveContainer" containerID="0124d37c95705d130548fc7385b55f038ac986d46bac2121797f0c3b2db108f6" Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.889194 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n2pvj"] Sep 29 20:01:32 crc kubenswrapper[4792]: I0929 20:01:32.899482 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n2pvj"] Sep 29 20:01:33 crc kubenswrapper[4792]: I0929 20:01:33.030771 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a00e222-e33a-4484-b24c-8373cbec3fce" path="/var/lib/kubelet/pods/8a00e222-e33a-4484-b24c-8373cbec3fce/volumes" Sep 29 20:01:46 crc kubenswrapper[4792]: I0929 20:01:46.015038 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:01:46 crc kubenswrapper[4792]: E0929 20:01:46.015738 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:02:01 crc kubenswrapper[4792]: I0929 20:02:01.015416 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:02:01 crc kubenswrapper[4792]: E0929 20:02:01.016213 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
Sep 29 20:02:16 crc kubenswrapper[4792]: I0929 20:02:16.015713 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c"
Sep 29 20:02:16 crc kubenswrapper[4792]: E0929 20:02:16.016598 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:02:28 crc kubenswrapper[4792]: I0929 20:02:28.016004 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c"
Sep 29 20:02:28 crc kubenswrapper[4792]: E0929 20:02:28.018102 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:02:41 crc kubenswrapper[4792]: I0929 20:02:41.014874 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c"
Sep 29 20:02:41 crc kubenswrapper[4792]: E0929 20:02:41.015595 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:02:53 crc kubenswrapper[4792]: I0929 20:02:53.015651 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c"
Sep 29 20:02:53 crc kubenswrapper[4792]: E0929 20:02:53.016411 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:03:04 crc kubenswrapper[4792]: I0929 20:03:04.015763 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c"
Sep 29 20:03:04 crc kubenswrapper[4792]: E0929 20:03:04.016591 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:03:17 crc kubenswrapper[4792]: I0929 20:03:17.016022 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c"
Sep 29 20:03:17 crc kubenswrapper[4792]: E0929 20:03:17.016694 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:03:32 crc kubenswrapper[4792]: I0929 20:03:32.016299 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c"
Sep 29 20:03:32 crc kubenswrapper[4792]: E0929 20:03:32.017454 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.532380 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ffrc6"]
Sep 29 20:03:43 crc kubenswrapper[4792]: E0929 20:03:43.533285 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerName="extract-utilities"
Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.533298 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerName="extract-utilities"
Sep 29 20:03:43 crc kubenswrapper[4792]: E0929 20:03:43.533316 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerName="extract-content"
Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.533322 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerName="extract-content"
Sep 29 20:03:43 crc kubenswrapper[4792]: E0929 20:03:43.533334 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerName="registry-server"
Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.533342 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerName="registry-server"
Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.533535 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a00e222-e33a-4484-b24c-8373cbec3fce" containerName="registry-server"
Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.534792 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffrc6"
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.547464 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffrc6"] Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.716676 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26ztm\" (UniqueName: \"kubernetes.io/projected/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-kube-api-access-26ztm\") pod \"redhat-marketplace-ffrc6\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.716849 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-catalog-content\") pod \"redhat-marketplace-ffrc6\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.716881 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-utilities\") pod \"redhat-marketplace-ffrc6\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.818828 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-catalog-content\") pod \"redhat-marketplace-ffrc6\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.819138 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-utilities\") pod \"redhat-marketplace-ffrc6\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.819252 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26ztm\" (UniqueName: \"kubernetes.io/projected/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-kube-api-access-26ztm\") pod \"redhat-marketplace-ffrc6\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.819421 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-catalog-content\") pod \"redhat-marketplace-ffrc6\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.819607 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-utilities\") pod \"redhat-marketplace-ffrc6\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:43 crc kubenswrapper[4792]: I0929 20:03:43.993079 4792 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-26ztm\" (UniqueName: \"kubernetes.io/projected/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-kube-api-access-26ztm\") pod \"redhat-marketplace-ffrc6\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:44 crc kubenswrapper[4792]: I0929 20:03:44.169193 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:44 crc kubenswrapper[4792]: I0929 20:03:44.643580 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffrc6"] Sep 29 20:03:45 crc kubenswrapper[4792]: I0929 20:03:45.130769 4792 generic.go:334] "Generic (PLEG): container finished" podID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerID="374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677" exitCode=0 Sep 29 20:03:45 crc kubenswrapper[4792]: I0929 20:03:45.130810 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffrc6" event={"ID":"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb","Type":"ContainerDied","Data":"374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677"} Sep 29 20:03:45 crc kubenswrapper[4792]: I0929 20:03:45.130835 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffrc6" event={"ID":"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb","Type":"ContainerStarted","Data":"3777192256a9eacd14ee702c0fe1a402cede0965f012a8cf048dc95ae79109df"} Sep 29 20:03:45 crc kubenswrapper[4792]: I0929 20:03:45.132394 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 20:03:46 crc kubenswrapper[4792]: I0929 20:03:46.015823 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:03:46 crc kubenswrapper[4792]: E0929 20:03:46.016541 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:03:46 crc kubenswrapper[4792]: I0929 20:03:46.140170 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffrc6" event={"ID":"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb","Type":"ContainerStarted","Data":"8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9"} Sep 29 20:03:47 crc kubenswrapper[4792]: I0929 20:03:47.151521 4792 generic.go:334] "Generic (PLEG): container finished" podID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerID="8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9" exitCode=0 Sep 29 20:03:47 crc kubenswrapper[4792]: I0929 20:03:47.151681 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffrc6" event={"ID":"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb","Type":"ContainerDied","Data":"8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9"} Sep 29 20:03:48 crc kubenswrapper[4792]: I0929 20:03:48.162738 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffrc6" 
event={"ID":"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb","Type":"ContainerStarted","Data":"a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8"} Sep 29 20:03:48 crc kubenswrapper[4792]: I0929 20:03:48.182672 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ffrc6" podStartSLOduration=2.711482412 podStartE2EDuration="5.182653393s" podCreationTimestamp="2025-09-29 20:03:43 +0000 UTC" firstStartedPulling="2025-09-29 20:03:45.132211253 +0000 UTC m=+4037.125518649" lastFinishedPulling="2025-09-29 20:03:47.603382213 +0000 UTC m=+4039.596689630" observedRunningTime="2025-09-29 20:03:48.180316092 +0000 UTC m=+4040.173623498" watchObservedRunningTime="2025-09-29 20:03:48.182653393 +0000 UTC m=+4040.175960789" Sep 29 20:03:54 crc kubenswrapper[4792]: I0929 20:03:54.170330 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:54 crc kubenswrapper[4792]: I0929 20:03:54.172122 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:54 crc kubenswrapper[4792]: I0929 20:03:54.242420 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:55 crc kubenswrapper[4792]: I0929 20:03:55.285436 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:55 crc kubenswrapper[4792]: I0929 20:03:55.337367 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffrc6"] Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.246751 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ffrc6" podUID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerName="registry-server" containerID="cri-o://a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8" gracePeriod=2 Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.807320 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.826360 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-catalog-content\") pod \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.826698 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26ztm\" (UniqueName: \"kubernetes.io/projected/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-kube-api-access-26ztm\") pod \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.826875 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-utilities\") pod \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\" (UID: \"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb\") " Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.828545 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-utilities" (OuterVolumeSpecName: "utilities") pod "0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" (UID: "0cc9c5ca-1a4f-4f06-b12b-6527d00718cb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.833078 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-kube-api-access-26ztm" (OuterVolumeSpecName: "kube-api-access-26ztm") pod "0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" (UID: "0cc9c5ca-1a4f-4f06-b12b-6527d00718cb"). InnerVolumeSpecName "kube-api-access-26ztm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.843022 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" (UID: "0cc9c5ca-1a4f-4f06-b12b-6527d00718cb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.928790 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.928819 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26ztm\" (UniqueName: \"kubernetes.io/projected/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-kube-api-access-26ztm\") on node \"crc\" DevicePath \"\"" Sep 29 20:03:57 crc kubenswrapper[4792]: I0929 20:03:57.928831 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.258635 4792 generic.go:334] "Generic (PLEG): container finished" podID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerID="a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8" exitCode=0 Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.258675 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffrc6" event={"ID":"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb","Type":"ContainerDied","Data":"a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8"} Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.258702 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffrc6" event={"ID":"0cc9c5ca-1a4f-4f06-b12b-6527d00718cb","Type":"ContainerDied","Data":"3777192256a9eacd14ee702c0fe1a402cede0965f012a8cf048dc95ae79109df"} Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.258718 4792 scope.go:117] "RemoveContainer" containerID="a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8" Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.258716 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffrc6" Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.292177 4792 scope.go:117] "RemoveContainer" containerID="8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9" Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.311010 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffrc6"] Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.323661 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffrc6"] Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.343447 4792 scope.go:117] "RemoveContainer" containerID="374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677" Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.389158 4792 scope.go:117] "RemoveContainer" containerID="a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8" Sep 29 20:03:58 crc kubenswrapper[4792]: E0929 20:03:58.389599 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8\": container with ID starting with a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8 not found: ID does not exist" containerID="a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8" Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.389639 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8"} err="failed to get container status \"a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8\": rpc error: code = NotFound desc = could not find container \"a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8\": container with ID starting with a31b5666815a3d898fad1605c4fc1db0ca9e1c1f8295a671eb70c2c5cc8dedb8 not found: ID does not exist" Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.389665 4792 scope.go:117] "RemoveContainer" containerID="8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9" Sep 29 20:03:58 crc kubenswrapper[4792]: E0929 20:03:58.390039 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9\": container with ID starting with 8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9 not found: ID does not exist" containerID="8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9" Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.390092 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9"} err="failed to get container status \"8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9\": rpc error: code = NotFound desc = could not find container \"8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9\": container with ID starting with 8325aace70b5e9218c8df941a86d25d77c63eb5f1ba473e390f92583fb7756c9 not found: ID does not exist" Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.390128 4792 scope.go:117] "RemoveContainer" containerID="374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677" Sep 29 20:03:58 crc kubenswrapper[4792]: E0929 20:03:58.390377 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677\": container with ID starting with 374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677 not found: ID does not exist" containerID="374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677" Sep 29 20:03:58 crc kubenswrapper[4792]: I0929 20:03:58.390402 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677"} err="failed to get container status \"374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677\": rpc error: code = NotFound desc = could not find container \"374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677\": container with ID starting with 374d58bdae5a91d79885ad6e5a1b5269236f9ca5342e2ec0e787a061d0a3b677 not found: ID does not exist" Sep 29 20:03:59 crc kubenswrapper[4792]: I0929 20:03:59.024529 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" path="/var/lib/kubelet/pods/0cc9c5ca-1a4f-4f06-b12b-6527d00718cb/volumes" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.805258 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x4n2z"] Sep 29 20:04:00 crc kubenswrapper[4792]: E0929 20:04:00.805908 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerName="registry-server" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.805924 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerName="registry-server" Sep 29 20:04:00 crc kubenswrapper[4792]: E0929 20:04:00.805954 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerName="extract-utilities" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.805962 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerName="extract-utilities" Sep 29 20:04:00 crc kubenswrapper[4792]: E0929 20:04:00.805983 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerName="extract-content" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.805993 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerName="extract-content" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.806211 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cc9c5ca-1a4f-4f06-b12b-6527d00718cb" containerName="registry-server" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.808050 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.825237 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x4n2z"] Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.878904 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg4fx\" (UniqueName: \"kubernetes.io/projected/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-kube-api-access-fg4fx\") pod \"certified-operators-x4n2z\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.879065 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-catalog-content\") pod \"certified-operators-x4n2z\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.879089 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-utilities\") pod \"certified-operators-x4n2z\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.980310 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-catalog-content\") pod \"certified-operators-x4n2z\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.980360 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-utilities\") pod \"certified-operators-x4n2z\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.980409 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg4fx\" (UniqueName: \"kubernetes.io/projected/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-kube-api-access-fg4fx\") pod \"certified-operators-x4n2z\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.981222 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-catalog-content\") pod \"certified-operators-x4n2z\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:00 crc kubenswrapper[4792]: I0929 20:04:00.981466 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-utilities\") pod \"certified-operators-x4n2z\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:01 crc kubenswrapper[4792]: I0929 20:04:01.000651 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fg4fx\" (UniqueName: \"kubernetes.io/projected/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-kube-api-access-fg4fx\") pod \"certified-operators-x4n2z\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:01 crc kubenswrapper[4792]: I0929 20:04:01.015343 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:04:01 crc kubenswrapper[4792]: E0929 20:04:01.015654 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:04:01 crc kubenswrapper[4792]: I0929 20:04:01.127599 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:02 crc kubenswrapper[4792]: I0929 20:04:01.781206 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x4n2z"] Sep 29 20:04:02 crc kubenswrapper[4792]: I0929 20:04:02.299246 4792 generic.go:334] "Generic (PLEG): container finished" podID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" containerID="9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e" exitCode=0 Sep 29 20:04:02 crc kubenswrapper[4792]: I0929 20:04:02.299356 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4n2z" event={"ID":"17efbf0c-8595-4794-98d7-1ae9aa74d8c1","Type":"ContainerDied","Data":"9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e"} Sep 29 20:04:02 crc kubenswrapper[4792]: I0929 20:04:02.299537 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4n2z" event={"ID":"17efbf0c-8595-4794-98d7-1ae9aa74d8c1","Type":"ContainerStarted","Data":"60de91f921cd59c2fc7e3f5a5e0caad311e55d0308babb1bf405aac61178b166"} Sep 29 20:04:03 crc kubenswrapper[4792]: I0929 20:04:03.309503 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4n2z" event={"ID":"17efbf0c-8595-4794-98d7-1ae9aa74d8c1","Type":"ContainerStarted","Data":"47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e"} Sep 29 20:04:04 crc kubenswrapper[4792]: I0929 20:04:04.320990 4792 generic.go:334] "Generic (PLEG): container finished" podID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" containerID="47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e" exitCode=0 Sep 29 20:04:04 crc kubenswrapper[4792]: I0929 20:04:04.321073 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4n2z" event={"ID":"17efbf0c-8595-4794-98d7-1ae9aa74d8c1","Type":"ContainerDied","Data":"47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e"} Sep 29 20:04:05 crc kubenswrapper[4792]: I0929 20:04:05.336465 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4n2z" event={"ID":"17efbf0c-8595-4794-98d7-1ae9aa74d8c1","Type":"ContainerStarted","Data":"c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce"} Sep 29 20:04:05 crc kubenswrapper[4792]: I0929 20:04:05.360548 4792 
Sep 29 20:04:05 crc kubenswrapper[4792]: I0929 20:04:05.360548 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x4n2z" podStartSLOduration=2.885053037 podStartE2EDuration="5.36052614s" podCreationTimestamp="2025-09-29 20:04:00 +0000 UTC" firstStartedPulling="2025-09-29 20:04:02.301741212 +0000 UTC m=+4054.295048638" lastFinishedPulling="2025-09-29 20:04:04.777214345 +0000 UTC m=+4056.770521741" observedRunningTime="2025-09-29 20:04:05.354729788 +0000 UTC m=+4057.348037224" watchObservedRunningTime="2025-09-29 20:04:05.36052614 +0000 UTC m=+4057.353833546"
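The startup-latency record above reports two durations; the difference between them is the image-pull window, which the monotonic clock offsets (m=+...) in the same record let us verify. A quick check in Python, using only values copied from that record:

    # podStartSLOduration = podStartE2EDuration - time spent pulling images
    first_started_pulling = 4054.295048638   # m=+ offset of firstStartedPulling
    last_finished_pulling = 4056.770521741   # m=+ offset of lastFinishedPulling
    pod_start_e2e = 5.36052614               # podStartE2EDuration, seconds

    image_pull = last_finished_pulling - first_started_pulling   # 2.475473103
    print(round(pod_start_e2e - image_pull, 9))  # 2.885053037 == podStartSLOduration

The same relationship holds for the other "Observed pod startup duration" records later in this log.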
Need to start a new one" pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.015085 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-catalog-content\") pod \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.015169 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fg4fx\" (UniqueName: \"kubernetes.io/projected/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-kube-api-access-fg4fx\") pod \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.015235 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.015302 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-utilities\") pod \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\" (UID: \"17efbf0c-8595-4794-98d7-1ae9aa74d8c1\") " Sep 29 20:04:14 crc kubenswrapper[4792]: E0929 20:04:14.015491 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.016433 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-utilities" (OuterVolumeSpecName: "utilities") pod "17efbf0c-8595-4794-98d7-1ae9aa74d8c1" (UID: "17efbf0c-8595-4794-98d7-1ae9aa74d8c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.021063 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-kube-api-access-fg4fx" (OuterVolumeSpecName: "kube-api-access-fg4fx") pod "17efbf0c-8595-4794-98d7-1ae9aa74d8c1" (UID: "17efbf0c-8595-4794-98d7-1ae9aa74d8c1"). InnerVolumeSpecName "kube-api-access-fg4fx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.068538 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "17efbf0c-8595-4794-98d7-1ae9aa74d8c1" (UID: "17efbf0c-8595-4794-98d7-1ae9aa74d8c1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.117755 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.117794 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.117806 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fg4fx\" (UniqueName: \"kubernetes.io/projected/17efbf0c-8595-4794-98d7-1ae9aa74d8c1-kube-api-access-fg4fx\") on node \"crc\" DevicePath \"\"" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.428949 4792 generic.go:334] "Generic (PLEG): container finished" podID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" containerID="c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce" exitCode=0 Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.429030 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4n2z" event={"ID":"17efbf0c-8595-4794-98d7-1ae9aa74d8c1","Type":"ContainerDied","Data":"c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce"} Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.430003 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4n2z" event={"ID":"17efbf0c-8595-4794-98d7-1ae9aa74d8c1","Type":"ContainerDied","Data":"60de91f921cd59c2fc7e3f5a5e0caad311e55d0308babb1bf405aac61178b166"} Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.430038 4792 scope.go:117] "RemoveContainer" containerID="c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.429054 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x4n2z" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.463448 4792 scope.go:117] "RemoveContainer" containerID="47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.475231 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x4n2z"] Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.488714 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x4n2z"] Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.504053 4792 scope.go:117] "RemoveContainer" containerID="9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.556400 4792 scope.go:117] "RemoveContainer" containerID="c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce" Sep 29 20:04:14 crc kubenswrapper[4792]: E0929 20:04:14.557021 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce\": container with ID starting with c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce not found: ID does not exist" containerID="c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.557069 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce"} err="failed to get container status \"c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce\": rpc error: code = NotFound desc = could not find container \"c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce\": container with ID starting with c5bd586d23dfea730d5eebc49cdfb70781a05d4bd66f92d4b6ef3b37a34aa3ce not found: ID does not exist" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.557102 4792 scope.go:117] "RemoveContainer" containerID="47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e" Sep 29 20:04:14 crc kubenswrapper[4792]: E0929 20:04:14.557470 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e\": container with ID starting with 47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e not found: ID does not exist" containerID="47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.557501 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e"} err="failed to get container status \"47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e\": rpc error: code = NotFound desc = could not find container \"47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e\": container with ID starting with 47e27ea6b55e9d3e37534aa3b7e1387e147cf41bc6a62f57ef204c47ae23c82e not found: ID does not exist" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.557521 4792 scope.go:117] "RemoveContainer" containerID="9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e" Sep 29 20:04:14 crc kubenswrapper[4792]: E0929 20:04:14.557824 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e\": container with ID starting with 9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e not found: ID does not exist" containerID="9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e" Sep 29 20:04:14 crc kubenswrapper[4792]: I0929 20:04:14.557990 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e"} err="failed to get container status \"9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e\": rpc error: code = NotFound desc = could not find container \"9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e\": container with ID starting with 9f1ef12215cd7fcec34859fe47faaa38ff20a15d32b342714ed576222b782e7e not found: ID does not exist" Sep 29 20:04:15 crc kubenswrapper[4792]: I0929 20:04:15.028158 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" path="/var/lib/kubelet/pods/17efbf0c-8595-4794-98d7-1ae9aa74d8c1/volumes" Sep 29 20:04:29 crc kubenswrapper[4792]: I0929 20:04:29.042241 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:04:29 crc kubenswrapper[4792]: E0929 20:04:29.043087 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:04:43 crc kubenswrapper[4792]: I0929 20:04:43.015428 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:04:43 crc kubenswrapper[4792]: E0929 20:04:43.016286 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:04:57 crc kubenswrapper[4792]: I0929 20:04:57.016050 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:04:57 crc kubenswrapper[4792]: E0929 20:04:57.016795 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:05:10 crc kubenswrapper[4792]: I0929 20:05:10.016018 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:05:10 crc kubenswrapper[4792]: E0929 20:05:10.018029 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:05:22 crc kubenswrapper[4792]: I0929 20:05:22.015660 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:05:22 crc kubenswrapper[4792]: E0929 20:05:22.016295 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:05:29 crc kubenswrapper[4792]: I0929 20:05:29.793409 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4" containerName="galera" probeResult="failure" output="command timed out" Sep 29 20:05:29 crc kubenswrapper[4792]: I0929 20:05:29.793412 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-galera-0" podUID="5c8592a0-091a-48ce-996c-f42bbdaf240c" containerName="galera" probeResult="failure" output="command timed out" Sep 29 20:05:37 crc kubenswrapper[4792]: I0929 20:05:37.016800 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:05:37 crc kubenswrapper[4792]: E0929 20:05:37.018774 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:05:51 crc kubenswrapper[4792]: I0929 20:05:51.015152 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:05:51 crc kubenswrapper[4792]: I0929 20:05:51.329473 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"edbe2c47c6cb56af81ddbfffa93aad15dfa1cbd4b7f9ffe576a99abe2914aced"} Sep 29 20:08:11 crc kubenswrapper[4792]: I0929 20:08:11.960045 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:08:11 crc kubenswrapper[4792]: I0929 20:08:11.960627 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:08:22 crc kubenswrapper[4792]: I0929 
20:08:22.920778 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vj6p9"] Sep 29 20:08:22 crc kubenswrapper[4792]: E0929 20:08:22.921744 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" containerName="extract-content" Sep 29 20:08:22 crc kubenswrapper[4792]: I0929 20:08:22.921764 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" containerName="extract-content" Sep 29 20:08:22 crc kubenswrapper[4792]: E0929 20:08:22.921824 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" containerName="extract-utilities" Sep 29 20:08:22 crc kubenswrapper[4792]: I0929 20:08:22.921833 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" containerName="extract-utilities" Sep 29 20:08:22 crc kubenswrapper[4792]: E0929 20:08:22.921843 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" containerName="registry-server" Sep 29 20:08:22 crc kubenswrapper[4792]: I0929 20:08:22.921867 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" containerName="registry-server" Sep 29 20:08:22 crc kubenswrapper[4792]: I0929 20:08:22.922085 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="17efbf0c-8595-4794-98d7-1ae9aa74d8c1" containerName="registry-server" Sep 29 20:08:22 crc kubenswrapper[4792]: I0929 20:08:22.923759 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:22 crc kubenswrapper[4792]: I0929 20:08:22.938825 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vj6p9"] Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.060891 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-utilities\") pod \"redhat-operators-vj6p9\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.060950 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqjjd\" (UniqueName: \"kubernetes.io/projected/fbc6009f-4fa2-464f-b472-054f0badb4eb-kube-api-access-zqjjd\") pod \"redhat-operators-vj6p9\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.061052 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-catalog-content\") pod \"redhat-operators-vj6p9\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.162272 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-utilities\") pod \"redhat-operators-vj6p9\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.162315 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqjjd\" (UniqueName: \"kubernetes.io/projected/fbc6009f-4fa2-464f-b472-054f0badb4eb-kube-api-access-zqjjd\") pod \"redhat-operators-vj6p9\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.162459 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-catalog-content\") pod \"redhat-operators-vj6p9\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.163517 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-catalog-content\") pod \"redhat-operators-vj6p9\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.163763 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-utilities\") pod \"redhat-operators-vj6p9\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.185336 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqjjd\" (UniqueName: \"kubernetes.io/projected/fbc6009f-4fa2-464f-b472-054f0badb4eb-kube-api-access-zqjjd\") pod \"redhat-operators-vj6p9\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.273216 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:23 crc kubenswrapper[4792]: I0929 20:08:23.762754 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vj6p9"] Sep 29 20:08:24 crc kubenswrapper[4792]: I0929 20:08:24.698933 4792 generic.go:334] "Generic (PLEG): container finished" podID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerID="ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac" exitCode=0 Sep 29 20:08:24 crc kubenswrapper[4792]: I0929 20:08:24.699219 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vj6p9" event={"ID":"fbc6009f-4fa2-464f-b472-054f0badb4eb","Type":"ContainerDied","Data":"ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac"} Sep 29 20:08:24 crc kubenswrapper[4792]: I0929 20:08:24.699249 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vj6p9" event={"ID":"fbc6009f-4fa2-464f-b472-054f0badb4eb","Type":"ContainerStarted","Data":"5bf871db720a1260d695796945b008a615c330df94b52c391c43d4534dabd186"} Sep 29 20:08:26 crc kubenswrapper[4792]: I0929 20:08:26.717899 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vj6p9" event={"ID":"fbc6009f-4fa2-464f-b472-054f0badb4eb","Type":"ContainerStarted","Data":"41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06"} Sep 29 20:08:29 crc kubenswrapper[4792]: I0929 20:08:29.743554 4792 generic.go:334] "Generic (PLEG): container finished" podID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerID="41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06" exitCode=0 Sep 29 20:08:29 crc kubenswrapper[4792]: I0929 20:08:29.743626 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vj6p9" event={"ID":"fbc6009f-4fa2-464f-b472-054f0badb4eb","Type":"ContainerDied","Data":"41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06"} Sep 29 20:08:30 crc kubenswrapper[4792]: I0929 20:08:30.773324 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vj6p9" event={"ID":"fbc6009f-4fa2-464f-b472-054f0badb4eb","Type":"ContainerStarted","Data":"5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa"} Sep 29 20:08:30 crc kubenswrapper[4792]: I0929 20:08:30.806142 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vj6p9" podStartSLOduration=3.355163692 podStartE2EDuration="8.806121484s" podCreationTimestamp="2025-09-29 20:08:22 +0000 UTC" firstStartedPulling="2025-09-29 20:08:24.702680936 +0000 UTC m=+4316.695988332" lastFinishedPulling="2025-09-29 20:08:30.153638718 +0000 UTC m=+4322.146946124" observedRunningTime="2025-09-29 20:08:30.788747699 +0000 UTC m=+4322.782055235" watchObservedRunningTime="2025-09-29 20:08:30.806121484 +0000 UTC m=+4322.799428890" Sep 29 20:08:33 crc kubenswrapper[4792]: I0929 20:08:33.274374 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:33 crc kubenswrapper[4792]: I0929 20:08:33.274661 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:34 crc kubenswrapper[4792]: I0929 20:08:34.331501 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vj6p9" 
podUID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerName="registry-server" probeResult="failure" output=< Sep 29 20:08:34 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Sep 29 20:08:34 crc kubenswrapper[4792]: > Sep 29 20:08:41 crc kubenswrapper[4792]: I0929 20:08:41.960001 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:08:41 crc kubenswrapper[4792]: I0929 20:08:41.960556 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:08:43 crc kubenswrapper[4792]: I0929 20:08:43.323513 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:43 crc kubenswrapper[4792]: I0929 20:08:43.374415 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:43 crc kubenswrapper[4792]: I0929 20:08:43.571722 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vj6p9"] Sep 29 20:08:44 crc kubenswrapper[4792]: I0929 20:08:44.887021 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vj6p9" podUID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerName="registry-server" containerID="cri-o://5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa" gracePeriod=2 Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.790089 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.901701 4792 generic.go:334] "Generic (PLEG): container finished" podID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerID="5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa" exitCode=0 Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.902136 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vj6p9" event={"ID":"fbc6009f-4fa2-464f-b472-054f0badb4eb","Type":"ContainerDied","Data":"5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa"} Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.901785 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vj6p9" Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.902194 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vj6p9" event={"ID":"fbc6009f-4fa2-464f-b472-054f0badb4eb","Type":"ContainerDied","Data":"5bf871db720a1260d695796945b008a615c330df94b52c391c43d4534dabd186"} Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.902222 4792 scope.go:117] "RemoveContainer" containerID="5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa" Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.929033 4792 scope.go:117] "RemoveContainer" containerID="41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06" Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.939642 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-utilities\") pod \"fbc6009f-4fa2-464f-b472-054f0badb4eb\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.939676 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqjjd\" (UniqueName: \"kubernetes.io/projected/fbc6009f-4fa2-464f-b472-054f0badb4eb-kube-api-access-zqjjd\") pod \"fbc6009f-4fa2-464f-b472-054f0badb4eb\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.939757 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-catalog-content\") pod \"fbc6009f-4fa2-464f-b472-054f0badb4eb\" (UID: \"fbc6009f-4fa2-464f-b472-054f0badb4eb\") " Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.940471 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-utilities" (OuterVolumeSpecName: "utilities") pod "fbc6009f-4fa2-464f-b472-054f0badb4eb" (UID: "fbc6009f-4fa2-464f-b472-054f0badb4eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.945968 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbc6009f-4fa2-464f-b472-054f0badb4eb-kube-api-access-zqjjd" (OuterVolumeSpecName: "kube-api-access-zqjjd") pod "fbc6009f-4fa2-464f-b472-054f0badb4eb" (UID: "fbc6009f-4fa2-464f-b472-054f0badb4eb"). InnerVolumeSpecName "kube-api-access-zqjjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:08:45 crc kubenswrapper[4792]: I0929 20:08:45.953599 4792 scope.go:117] "RemoveContainer" containerID="ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.021129 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fbc6009f-4fa2-464f-b472-054f0badb4eb" (UID: "fbc6009f-4fa2-464f-b472-054f0badb4eb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.038000 4792 scope.go:117] "RemoveContainer" containerID="5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa" Sep 29 20:08:46 crc kubenswrapper[4792]: E0929 20:08:46.038414 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa\": container with ID starting with 5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa not found: ID does not exist" containerID="5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.038442 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa"} err="failed to get container status \"5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa\": rpc error: code = NotFound desc = could not find container \"5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa\": container with ID starting with 5da6da0d553160bd887097ebd023bc4688eb0c02429d1d63ef2fdfc1627527aa not found: ID does not exist" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.038463 4792 scope.go:117] "RemoveContainer" containerID="41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06" Sep 29 20:08:46 crc kubenswrapper[4792]: E0929 20:08:46.038649 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06\": container with ID starting with 41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06 not found: ID does not exist" containerID="41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.038675 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06"} err="failed to get container status \"41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06\": rpc error: code = NotFound desc = could not find container \"41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06\": container with ID starting with 41cb6d8d76972b42d9a999b078435bc992a3114e90084246263d4087a8bb4e06 not found: ID does not exist" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.038688 4792 scope.go:117] "RemoveContainer" containerID="ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac" Sep 29 20:08:46 crc kubenswrapper[4792]: E0929 20:08:46.045065 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac\": container with ID starting with ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac not found: ID does not exist" containerID="ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.045118 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac"} err="failed to get container status \"ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac\": rpc error: code = NotFound desc = could not 
find container \"ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac\": container with ID starting with ded1c5a4a4fc97a2e38757d84223656bcffb3d61fc2d1504590f297539b1d1ac not found: ID does not exist" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.046149 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.046172 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqjjd\" (UniqueName: \"kubernetes.io/projected/fbc6009f-4fa2-464f-b472-054f0badb4eb-kube-api-access-zqjjd\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.046181 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbc6009f-4fa2-464f-b472-054f0badb4eb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.230754 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vj6p9"] Sep 29 20:08:46 crc kubenswrapper[4792]: I0929 20:08:46.238145 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vj6p9"] Sep 29 20:08:47 crc kubenswrapper[4792]: I0929 20:08:47.024869 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbc6009f-4fa2-464f-b472-054f0badb4eb" path="/var/lib/kubelet/pods/fbc6009f-4fa2-464f-b472-054f0badb4eb/volumes" Sep 29 20:08:50 crc kubenswrapper[4792]: I0929 20:08:50.954991 4792 generic.go:334] "Generic (PLEG): container finished" podID="49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" containerID="b070da4a6377ec4d174ed71129e854c4d41dc4b91810f01aa80c6cc25a3a689f" exitCode=0 Sep 29 20:08:50 crc kubenswrapper[4792]: I0929 20:08:50.955324 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b","Type":"ContainerDied","Data":"b070da4a6377ec4d174ed71129e854c4d41dc4b91810f01aa80c6cc25a3a689f"} Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.274942 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.357782 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config-secret\") pod \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.357826 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ca-certs\") pod \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.357885 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-temporary\") pod \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.357907 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.358065 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config\") pod \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.358092 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-workdir\") pod \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.358112 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs88s\" (UniqueName: \"kubernetes.io/projected/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-kube-api-access-hs88s\") pod \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.358169 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ssh-key\") pod \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.358206 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-config-data\") pod \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\" (UID: \"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b\") " Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.359385 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-config-data" (OuterVolumeSpecName: "config-data") pod 
"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" (UID: "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.364112 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" (UID: "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.364759 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" (UID: "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.370275 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-kube-api-access-hs88s" (OuterVolumeSpecName: "kube-api-access-hs88s") pod "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" (UID: "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b"). InnerVolumeSpecName "kube-api-access-hs88s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.379092 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "test-operator-logs") pod "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" (UID: "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.399299 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" (UID: "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.411427 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" (UID: "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.413501 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" (UID: "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.452927 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" (UID: "49e8a61d-e4e3-4510-b209-7d6fb5b02e2b"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.461106 4792 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.461151 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs88s\" (UniqueName: \"kubernetes.io/projected/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-kube-api-access-hs88s\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.461169 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.461182 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.461195 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.461206 4792 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-ca-certs\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.461218 4792 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.462077 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.462103 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/49e8a61d-e4e3-4510-b209-7d6fb5b02e2b-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.490453 4792 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.564269 4792 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.973078 4792 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"49e8a61d-e4e3-4510-b209-7d6fb5b02e2b","Type":"ContainerDied","Data":"0b8f68f37978451ee1299fec315a7600236357a1383924e78f052c6678cea3a5"} Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.973130 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b8f68f37978451ee1299fec315a7600236357a1383924e78f052c6678cea3a5" Sep 29 20:08:52 crc kubenswrapper[4792]: I0929 20:08:52.973135 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.763083 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 20:09:03 crc kubenswrapper[4792]: E0929 20:09:03.764114 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerName="extract-utilities" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.764127 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerName="extract-utilities" Sep 29 20:09:03 crc kubenswrapper[4792]: E0929 20:09:03.764144 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerName="extract-content" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.764151 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerName="extract-content" Sep 29 20:09:03 crc kubenswrapper[4792]: E0929 20:09:03.764166 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" containerName="tempest-tests-tempest-tests-runner" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.764172 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" containerName="tempest-tests-tempest-tests-runner" Sep 29 20:09:03 crc kubenswrapper[4792]: E0929 20:09:03.764201 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerName="registry-server" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.764207 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerName="registry-server" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.764383 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbc6009f-4fa2-464f-b472-054f0badb4eb" containerName="registry-server" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.764397 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="49e8a61d-e4e3-4510-b209-7d6fb5b02e2b" containerName="tempest-tests-tempest-tests-runner" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.765199 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.774440 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-rwtst" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.778258 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.889649 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd9k7\" (UniqueName: \"kubernetes.io/projected/33fd9eda-fc31-456e-8408-b9483c1fef79-kube-api-access-jd9k7\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"33fd9eda-fc31-456e-8408-b9483c1fef79\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.889746 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"33fd9eda-fc31-456e-8408-b9483c1fef79\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.991538 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd9k7\" (UniqueName: \"kubernetes.io/projected/33fd9eda-fc31-456e-8408-b9483c1fef79-kube-api-access-jd9k7\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"33fd9eda-fc31-456e-8408-b9483c1fef79\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.991636 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"33fd9eda-fc31-456e-8408-b9483c1fef79\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:09:03 crc kubenswrapper[4792]: I0929 20:09:03.993976 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"33fd9eda-fc31-456e-8408-b9483c1fef79\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:09:04 crc kubenswrapper[4792]: I0929 20:09:04.035735 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd9k7\" (UniqueName: \"kubernetes.io/projected/33fd9eda-fc31-456e-8408-b9483c1fef79-kube-api-access-jd9k7\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"33fd9eda-fc31-456e-8408-b9483c1fef79\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:09:04 crc kubenswrapper[4792]: I0929 20:09:04.073172 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"33fd9eda-fc31-456e-8408-b9483c1fef79\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:09:04 crc 
kubenswrapper[4792]: I0929 20:09:04.088654 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:09:04 crc kubenswrapper[4792]: I0929 20:09:04.564777 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 20:09:04 crc kubenswrapper[4792]: I0929 20:09:04.581807 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 20:09:05 crc kubenswrapper[4792]: I0929 20:09:05.113170 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"33fd9eda-fc31-456e-8408-b9483c1fef79","Type":"ContainerStarted","Data":"5f6afb037c1c745bdb65c135b4e52e7cf3dbd229c344c3043a3e3a31adf97a67"} Sep 29 20:09:06 crc kubenswrapper[4792]: I0929 20:09:06.123971 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"33fd9eda-fc31-456e-8408-b9483c1fef79","Type":"ContainerStarted","Data":"c92353a3c50f25d725d01c651a19c78d7d9772084571d4336ea0038d3319e464"} Sep 29 20:09:06 crc kubenswrapper[4792]: I0929 20:09:06.146092 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.9089403090000001 podStartE2EDuration="3.146056515s" podCreationTimestamp="2025-09-29 20:09:03 +0000 UTC" firstStartedPulling="2025-09-29 20:09:04.58134786 +0000 UTC m=+4356.574655306" lastFinishedPulling="2025-09-29 20:09:05.818464126 +0000 UTC m=+4357.811771512" observedRunningTime="2025-09-29 20:09:06.139461752 +0000 UTC m=+4358.132769198" watchObservedRunningTime="2025-09-29 20:09:06.146056515 +0000 UTC m=+4358.139363921" Sep 29 20:09:11 crc kubenswrapper[4792]: I0929 20:09:11.959720 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:09:11 crc kubenswrapper[4792]: I0929 20:09:11.960289 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:09:11 crc kubenswrapper[4792]: I0929 20:09:11.960334 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 20:09:11 crc kubenswrapper[4792]: I0929 20:09:11.961217 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"edbe2c47c6cb56af81ddbfffa93aad15dfa1cbd4b7f9ffe576a99abe2914aced"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 20:09:11 crc kubenswrapper[4792]: I0929 20:09:11.961304 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" 
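Above, the failed liveness probe marks the container unhealthy ("will be restarted") and the kubelet kills it with gracePeriod=600, presumably the pod's terminationGracePeriodSeconds; the ContainerDied/ContainerStarted pair below completes the restart. The decision in miniature (illustrative only, not kubelet code):

    def on_liveness_failure(termination_grace_seconds=600):
        # A failed liveness probe leads to a kill using the pod's
        # termination grace period, followed by a restart per the
        # pod's restartPolicy.
        return {"action": "kill-and-restart",
                "gracePeriod": termination_grace_seconds}

    print(on_liveness_failure())  # matches the gracePeriod=600 kill above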
containerID="cri-o://edbe2c47c6cb56af81ddbfffa93aad15dfa1cbd4b7f9ffe576a99abe2914aced" gracePeriod=600 Sep 29 20:09:12 crc kubenswrapper[4792]: I0929 20:09:12.189802 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="edbe2c47c6cb56af81ddbfffa93aad15dfa1cbd4b7f9ffe576a99abe2914aced" exitCode=0 Sep 29 20:09:12 crc kubenswrapper[4792]: I0929 20:09:12.189943 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"edbe2c47c6cb56af81ddbfffa93aad15dfa1cbd4b7f9ffe576a99abe2914aced"} Sep 29 20:09:12 crc kubenswrapper[4792]: I0929 20:09:12.190176 4792 scope.go:117] "RemoveContainer" containerID="6b9ec2c21abcc9abf3154171029946c7d3fe35239bc638c8d343773772ac556c" Sep 29 20:09:13 crc kubenswrapper[4792]: I0929 20:09:13.202013 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"} Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.133878 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kndnj/must-gather-27qxh"] Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.135822 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kndnj/must-gather-27qxh" Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.138781 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-kndnj"/"openshift-service-ca.crt" Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.140243 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-kndnj"/"kube-root-ca.crt" Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.140431 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-kndnj"/"default-dockercfg-f556l" Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.144786 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-kndnj/must-gather-27qxh"] Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.182026 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5bf9f011-e08b-4209-be0e-3706bc53b487-must-gather-output\") pod \"must-gather-27qxh\" (UID: \"5bf9f011-e08b-4209-be0e-3706bc53b487\") " pod="openshift-must-gather-kndnj/must-gather-27qxh" Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.182111 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4h4h\" (UniqueName: \"kubernetes.io/projected/5bf9f011-e08b-4209-be0e-3706bc53b487-kube-api-access-c4h4h\") pod \"must-gather-27qxh\" (UID: \"5bf9f011-e08b-4209-be0e-3706bc53b487\") " pod="openshift-must-gather-kndnj/must-gather-27qxh" Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.283434 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5bf9f011-e08b-4209-be0e-3706bc53b487-must-gather-output\") pod \"must-gather-27qxh\" (UID: \"5bf9f011-e08b-4209-be0e-3706bc53b487\") " pod="openshift-must-gather-kndnj/must-gather-27qxh" Sep 29 20:09:23 crc 
kubenswrapper[4792]: I0929 20:09:23.283552 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4h4h\" (UniqueName: \"kubernetes.io/projected/5bf9f011-e08b-4209-be0e-3706bc53b487-kube-api-access-c4h4h\") pod \"must-gather-27qxh\" (UID: \"5bf9f011-e08b-4209-be0e-3706bc53b487\") " pod="openshift-must-gather-kndnj/must-gather-27qxh" Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.283927 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5bf9f011-e08b-4209-be0e-3706bc53b487-must-gather-output\") pod \"must-gather-27qxh\" (UID: \"5bf9f011-e08b-4209-be0e-3706bc53b487\") " pod="openshift-must-gather-kndnj/must-gather-27qxh" Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.308566 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4h4h\" (UniqueName: \"kubernetes.io/projected/5bf9f011-e08b-4209-be0e-3706bc53b487-kube-api-access-c4h4h\") pod \"must-gather-27qxh\" (UID: \"5bf9f011-e08b-4209-be0e-3706bc53b487\") " pod="openshift-must-gather-kndnj/must-gather-27qxh" Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.453645 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kndnj/must-gather-27qxh" Sep 29 20:09:23 crc kubenswrapper[4792]: I0929 20:09:23.904409 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-kndnj/must-gather-27qxh"] Sep 29 20:09:24 crc kubenswrapper[4792]: I0929 20:09:24.314885 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/must-gather-27qxh" event={"ID":"5bf9f011-e08b-4209-be0e-3706bc53b487","Type":"ContainerStarted","Data":"030bafdf33c164bc545d60ce2db836e9786de7b7ad6ffbb3eb593be4b78a2030"} Sep 29 20:09:29 crc kubenswrapper[4792]: I0929 20:09:29.371159 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/must-gather-27qxh" event={"ID":"5bf9f011-e08b-4209-be0e-3706bc53b487","Type":"ContainerStarted","Data":"602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b"} Sep 29 20:09:29 crc kubenswrapper[4792]: I0929 20:09:29.371595 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/must-gather-27qxh" event={"ID":"5bf9f011-e08b-4209-be0e-3706bc53b487","Type":"ContainerStarted","Data":"7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42"} Sep 29 20:09:29 crc kubenswrapper[4792]: I0929 20:09:29.385297 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-kndnj/must-gather-27qxh" podStartSLOduration=2.193444575 podStartE2EDuration="6.385279085s" podCreationTimestamp="2025-09-29 20:09:23 +0000 UTC" firstStartedPulling="2025-09-29 20:09:23.908943049 +0000 UTC m=+4375.902250445" lastFinishedPulling="2025-09-29 20:09:28.100777569 +0000 UTC m=+4380.094084955" observedRunningTime="2025-09-29 20:09:29.38319328 +0000 UTC m=+4381.376500666" watchObservedRunningTime="2025-09-29 20:09:29.385279085 +0000 UTC m=+4381.378586491" Sep 29 20:09:33 crc kubenswrapper[4792]: I0929 20:09:33.969241 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kndnj/crc-debug-pb4pn"] Sep 29 20:09:33 crc kubenswrapper[4792]: I0929 20:09:33.972040 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-pb4pn" Sep 29 20:09:34 crc kubenswrapper[4792]: I0929 20:09:34.092940 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bw76\" (UniqueName: \"kubernetes.io/projected/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-kube-api-access-7bw76\") pod \"crc-debug-pb4pn\" (UID: \"71ad0279-0ae6-4ed7-b641-77b9ea7fc755\") " pod="openshift-must-gather-kndnj/crc-debug-pb4pn" Sep 29 20:09:34 crc kubenswrapper[4792]: I0929 20:09:34.093508 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-host\") pod \"crc-debug-pb4pn\" (UID: \"71ad0279-0ae6-4ed7-b641-77b9ea7fc755\") " pod="openshift-must-gather-kndnj/crc-debug-pb4pn" Sep 29 20:09:34 crc kubenswrapper[4792]: I0929 20:09:34.195292 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-host\") pod \"crc-debug-pb4pn\" (UID: \"71ad0279-0ae6-4ed7-b641-77b9ea7fc755\") " pod="openshift-must-gather-kndnj/crc-debug-pb4pn" Sep 29 20:09:34 crc kubenswrapper[4792]: I0929 20:09:34.195350 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bw76\" (UniqueName: \"kubernetes.io/projected/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-kube-api-access-7bw76\") pod \"crc-debug-pb4pn\" (UID: \"71ad0279-0ae6-4ed7-b641-77b9ea7fc755\") " pod="openshift-must-gather-kndnj/crc-debug-pb4pn" Sep 29 20:09:34 crc kubenswrapper[4792]: I0929 20:09:34.195415 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-host\") pod \"crc-debug-pb4pn\" (UID: \"71ad0279-0ae6-4ed7-b641-77b9ea7fc755\") " pod="openshift-must-gather-kndnj/crc-debug-pb4pn" Sep 29 20:09:34 crc kubenswrapper[4792]: I0929 20:09:34.226047 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bw76\" (UniqueName: \"kubernetes.io/projected/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-kube-api-access-7bw76\") pod \"crc-debug-pb4pn\" (UID: \"71ad0279-0ae6-4ed7-b641-77b9ea7fc755\") " pod="openshift-must-gather-kndnj/crc-debug-pb4pn" Sep 29 20:09:34 crc kubenswrapper[4792]: I0929 20:09:34.291834 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-pb4pn" Sep 29 20:09:34 crc kubenswrapper[4792]: I0929 20:09:34.414547 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/crc-debug-pb4pn" event={"ID":"71ad0279-0ae6-4ed7-b641-77b9ea7fc755","Type":"ContainerStarted","Data":"8a1c50a8b7a3d1c1ca4f33e6126dd695b72dc9ea1874874ae86c7e7344e31e17"} Sep 29 20:09:47 crc kubenswrapper[4792]: I0929 20:09:47.534062 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/crc-debug-pb4pn" event={"ID":"71ad0279-0ae6-4ed7-b641-77b9ea7fc755","Type":"ContainerStarted","Data":"722afaab1a67daa636189f2e5a41250af1a00f693a96dc2b090736d8c97b0a41"} Sep 29 20:09:47 crc kubenswrapper[4792]: I0929 20:09:47.561905 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-kndnj/crc-debug-pb4pn" podStartSLOduration=2.937583026 podStartE2EDuration="14.561880466s" podCreationTimestamp="2025-09-29 20:09:33 +0000 UTC" firstStartedPulling="2025-09-29 20:09:34.337838285 +0000 UTC m=+4386.331145681" lastFinishedPulling="2025-09-29 20:09:45.962135725 +0000 UTC m=+4397.955443121" observedRunningTime="2025-09-29 20:09:47.553333023 +0000 UTC m=+4399.546640419" watchObservedRunningTime="2025-09-29 20:09:47.561880466 +0000 UTC m=+4399.555187862" Sep 29 20:11:06 crc kubenswrapper[4792]: I0929 20:11:06.760017 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-65c5d84686-mddqt_9edbf81b-9313-4a4c-8dd0-b29b82f32888/barbican-api/0.log" Sep 29 20:11:06 crc kubenswrapper[4792]: I0929 20:11:06.851742 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-65c5d84686-mddqt_9edbf81b-9313-4a4c-8dd0-b29b82f32888/barbican-api-log/0.log" Sep 29 20:11:07 crc kubenswrapper[4792]: I0929 20:11:07.018801 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6864b589b6-rj9q8_799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb/barbican-keystone-listener/0.log" Sep 29 20:11:07 crc kubenswrapper[4792]: I0929 20:11:07.103663 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6864b589b6-rj9q8_799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb/barbican-keystone-listener-log/0.log" Sep 29 20:11:07 crc kubenswrapper[4792]: I0929 20:11:07.242446 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5697845c85-cnq66_c4ee74b8-8ac4-4a34-967b-6fcb220e90fa/barbican-worker/0.log" Sep 29 20:11:07 crc kubenswrapper[4792]: I0929 20:11:07.384643 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5697845c85-cnq66_c4ee74b8-8ac4-4a34-967b-6fcb220e90fa/barbican-worker-log/0.log" Sep 29 20:11:07 crc kubenswrapper[4792]: I0929 20:11:07.586934 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn_3dbdb326-a5bc-4d53-b4cc-6971b8a715e6/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:07 crc kubenswrapper[4792]: I0929 20:11:07.811290 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4/ceilometer-central-agent/0.log" Sep 29 20:11:07 crc kubenswrapper[4792]: I0929 20:11:07.877000 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4/ceilometer-notification-agent/0.log" Sep 29 20:11:07 crc kubenswrapper[4792]: I0929 
20:11:07.929798 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4/proxy-httpd/0.log" Sep 29 20:11:08 crc kubenswrapper[4792]: I0929 20:11:08.068555 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4/sg-core/0.log" Sep 29 20:11:08 crc kubenswrapper[4792]: I0929 20:11:08.181248 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0c9927d9-0800-4bfa-bee9-af02caf9596c/cinder-api/0.log" Sep 29 20:11:08 crc kubenswrapper[4792]: I0929 20:11:08.293724 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0c9927d9-0800-4bfa-bee9-af02caf9596c/cinder-api-log/0.log" Sep 29 20:11:08 crc kubenswrapper[4792]: I0929 20:11:08.363244 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_705349a9-36dc-4230-a7ff-e097fc5b66d7/cinder-scheduler/0.log" Sep 29 20:11:08 crc kubenswrapper[4792]: I0929 20:11:08.542819 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_705349a9-36dc-4230-a7ff-e097fc5b66d7/probe/0.log" Sep 29 20:11:08 crc kubenswrapper[4792]: I0929 20:11:08.674665 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-6xccv_723e4395-18dd-4729-be31-1c5ccf8e7ec8/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:08 crc kubenswrapper[4792]: I0929 20:11:08.891160 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt_5d30a56f-01e0-422e-99bd-08328d009094/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:09 crc kubenswrapper[4792]: I0929 20:11:09.050776 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-v2rp9_6d1053e6-7c5c-4c2c-828d-c9241606b3e1/init/0.log" Sep 29 20:11:09 crc kubenswrapper[4792]: I0929 20:11:09.273958 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-v2rp9_6d1053e6-7c5c-4c2c-828d-c9241606b3e1/init/0.log" Sep 29 20:11:09 crc kubenswrapper[4792]: I0929 20:11:09.446993 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-v2rp9_6d1053e6-7c5c-4c2c-828d-c9241606b3e1/dnsmasq-dns/0.log" Sep 29 20:11:09 crc kubenswrapper[4792]: I0929 20:11:09.517386 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj_c90a0d9d-bf42-4d49-9527-e859ffce83a0/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:09 crc kubenswrapper[4792]: I0929 20:11:09.689114 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2/glance-httpd/0.log" Sep 29 20:11:09 crc kubenswrapper[4792]: I0929 20:11:09.744374 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2/glance-log/0.log" Sep 29 20:11:09 crc kubenswrapper[4792]: I0929 20:11:09.995639 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_fe46ba4e-dc2a-4960-97d1-fd34116ee7d6/glance-httpd/0.log" Sep 29 20:11:10 crc kubenswrapper[4792]: I0929 20:11:10.024319 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-internal-api-0_fe46ba4e-dc2a-4960-97d1-fd34116ee7d6/glance-log/0.log" Sep 29 20:11:10 crc kubenswrapper[4792]: I0929 20:11:10.315013 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-8494dffd6-7rx5p_23845288-b122-49f0-b10d-641cfb94b66f/horizon/1.log" Sep 29 20:11:10 crc kubenswrapper[4792]: I0929 20:11:10.350745 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-8494dffd6-7rx5p_23845288-b122-49f0-b10d-641cfb94b66f/horizon/0.log" Sep 29 20:11:10 crc kubenswrapper[4792]: I0929 20:11:10.601715 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv_6ee2a07b-5943-4517-be5e-e1803f9d8a55/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:10 crc kubenswrapper[4792]: I0929 20:11:10.859356 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-8494dffd6-7rx5p_23845288-b122-49f0-b10d-641cfb94b66f/horizon-log/0.log" Sep 29 20:11:10 crc kubenswrapper[4792]: I0929 20:11:10.899177 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-ppdnh_af95758e-6a40-4679-ba1c-8ebf988f1865/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:11 crc kubenswrapper[4792]: I0929 20:11:11.144273 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319601-w5m7k_6885051d-f7a5-4076-a670-778fbd8d23ca/keystone-cron/0.log" Sep 29 20:11:11 crc kubenswrapper[4792]: I0929 20:11:11.393955 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_e148125d-7567-47d9-a3c3-32bd51ee3c9c/kube-state-metrics/0.log" Sep 29 20:11:11 crc kubenswrapper[4792]: I0929 20:11:11.569275 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-754c4b8fcb-w2t8n_a4b671bb-328e-401e-933f-665848067860/keystone-api/0.log" Sep 29 20:11:11 crc kubenswrapper[4792]: I0929 20:11:11.918163 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-s77fc_1a5948bb-2b33-40f6-9a12-1b8b4e3071a7/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:12 crc kubenswrapper[4792]: I0929 20:11:12.534876 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf_e87aba80-3b0a-409a-8b12-3a8b7c1290d8/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:12 crc kubenswrapper[4792]: I0929 20:11:12.643705 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-554fb67fd9-fh25j_015e1c15-2d65-42f6-8883-b0be2b5dc0ef/neutron-httpd/0.log" Sep 29 20:11:12 crc kubenswrapper[4792]: I0929 20:11:12.735134 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-554fb67fd9-fh25j_015e1c15-2d65-42f6-8883-b0be2b5dc0ef/neutron-api/0.log" Sep 29 20:11:13 crc kubenswrapper[4792]: I0929 20:11:13.323981 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_ea8bd43c-bb10-450a-b564-c7b4247d1252/memcached/0.log" Sep 29 20:11:13 crc kubenswrapper[4792]: I0929 20:11:13.919511 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_8c04cb97-af56-4b40-b086-990e57b48c15/nova-cell0-conductor-conductor/0.log" Sep 29 20:11:14 crc kubenswrapper[4792]: I0929 20:11:14.119700 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-conductor-0_454e3f84-9408-4433-9e44-15dde1100854/nova-cell1-conductor-conductor/0.log" Sep 29 20:11:14 crc kubenswrapper[4792]: I0929 20:11:14.126671 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_70e6c218-833c-460f-a81f-e126902df64b/nova-api-api/0.log" Sep 29 20:11:14 crc kubenswrapper[4792]: I0929 20:11:14.271455 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_70e6c218-833c-460f-a81f-e126902df64b/nova-api-log/0.log" Sep 29 20:11:14 crc kubenswrapper[4792]: I0929 20:11:14.466719 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_84ede8b3-cba9-4133-bfce-14f44cba07b8/nova-cell1-novncproxy-novncproxy/0.log" Sep 29 20:11:14 crc kubenswrapper[4792]: I0929 20:11:14.549887 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-ghrcf_477ec7c1-0c72-4b69-9a72-05d465fe26b9/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:14 crc kubenswrapper[4792]: I0929 20:11:14.777989 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_de3fc643-e567-4ae6-b446-f861b63822d7/nova-metadata-log/0.log" Sep 29 20:11:15 crc kubenswrapper[4792]: I0929 20:11:15.172340 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4/mysql-bootstrap/0.log" Sep 29 20:11:15 crc kubenswrapper[4792]: I0929 20:11:15.262291 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_38e4e8fe-a752-4d8c-aea2-07c6a92a7216/nova-scheduler-scheduler/0.log" Sep 29 20:11:15 crc kubenswrapper[4792]: I0929 20:11:15.474712 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4/galera/0.log" Sep 29 20:11:15 crc kubenswrapper[4792]: I0929 20:11:15.478503 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4/mysql-bootstrap/0.log" Sep 29 20:11:15 crc kubenswrapper[4792]: I0929 20:11:15.772188 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5c8592a0-091a-48ce-996c-f42bbdaf240c/mysql-bootstrap/0.log" Sep 29 20:11:15 crc kubenswrapper[4792]: I0929 20:11:15.972311 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_de3fc643-e567-4ae6-b446-f861b63822d7/nova-metadata-metadata/0.log" Sep 29 20:11:16 crc kubenswrapper[4792]: I0929 20:11:16.129801 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5c8592a0-091a-48ce-996c-f42bbdaf240c/mysql-bootstrap/0.log" Sep 29 20:11:16 crc kubenswrapper[4792]: I0929 20:11:16.142688 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5c8592a0-091a-48ce-996c-f42bbdaf240c/galera/0.log" Sep 29 20:11:16 crc kubenswrapper[4792]: I0929 20:11:16.208079 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_36d9080e-6ba5-4a59-ac59-21f8a868df0d/openstackclient/0.log" Sep 29 20:11:16 crc kubenswrapper[4792]: I0929 20:11:16.364817 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-jcjdl_51d7ae2f-4cac-4245-b001-91413652f89e/openstack-network-exporter/0.log" Sep 29 20:11:16 crc kubenswrapper[4792]: I0929 20:11:16.466311 4792 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovn-controller-ovs-mh2vn_904d363d-f0dc-4318-9f28-d06e374a4838/ovsdb-server-init/0.log" Sep 29 20:11:16 crc kubenswrapper[4792]: I0929 20:11:16.663505 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mh2vn_904d363d-f0dc-4318-9f28-d06e374a4838/ovs-vswitchd/0.log" Sep 29 20:11:16 crc kubenswrapper[4792]: I0929 20:11:16.683063 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mh2vn_904d363d-f0dc-4318-9f28-d06e374a4838/ovsdb-server-init/0.log" Sep 29 20:11:16 crc kubenswrapper[4792]: I0929 20:11:16.733500 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mh2vn_904d363d-f0dc-4318-9f28-d06e374a4838/ovsdb-server/0.log" Sep 29 20:11:16 crc kubenswrapper[4792]: I0929 20:11:16.864530 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-zvckm_321cc22b-3e6d-429f-aba5-d69c973d889e/ovn-controller/0.log" Sep 29 20:11:16 crc kubenswrapper[4792]: I0929 20:11:16.978663 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-vx68g_c694fcd6-bd39-4ec9-9b52-536c53bfff92/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:17 crc kubenswrapper[4792]: I0929 20:11:17.108964 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_934bc291-1ca4-4155-bb99-b3fde7a0d5e5/openstack-network-exporter/0.log" Sep 29 20:11:17 crc kubenswrapper[4792]: I0929 20:11:17.190401 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_934bc291-1ca4-4155-bb99-b3fde7a0d5e5/ovn-northd/0.log" Sep 29 20:11:17 crc kubenswrapper[4792]: I0929 20:11:17.283174 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_da980a6f-8dcf-4d5b-a972-fc646865967c/openstack-network-exporter/0.log" Sep 29 20:11:17 crc kubenswrapper[4792]: I0929 20:11:17.345163 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_da980a6f-8dcf-4d5b-a972-fc646865967c/ovsdbserver-nb/0.log" Sep 29 20:11:17 crc kubenswrapper[4792]: I0929 20:11:17.503286 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_272430d7-51cd-4f45-bfdd-73ed83ab0bc2/ovsdbserver-sb/0.log" Sep 29 20:11:17 crc kubenswrapper[4792]: I0929 20:11:17.516310 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_272430d7-51cd-4f45-bfdd-73ed83ab0bc2/openstack-network-exporter/0.log" Sep 29 20:11:17 crc kubenswrapper[4792]: I0929 20:11:17.762961 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-666f667548-cm9kb_d463ed77-f74f-4724-b942-1f542755d4d4/placement-api/0.log" Sep 29 20:11:17 crc kubenswrapper[4792]: I0929 20:11:17.911609 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-666f667548-cm9kb_d463ed77-f74f-4724-b942-1f542755d4d4/placement-log/0.log" Sep 29 20:11:17 crc kubenswrapper[4792]: I0929 20:11:17.930880 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4e364c89-8b07-427c-a59a-c4576f98ddf2/setup-container/0.log" Sep 29 20:11:18 crc kubenswrapper[4792]: I0929 20:11:18.101731 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4e364c89-8b07-427c-a59a-c4576f98ddf2/setup-container/0.log" Sep 29 20:11:18 crc kubenswrapper[4792]: I0929 20:11:18.127976 4792 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4e364c89-8b07-427c-a59a-c4576f98ddf2/rabbitmq/0.log" Sep 29 20:11:18 crc kubenswrapper[4792]: I0929 20:11:18.211202 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_dd9e8433-9eac-49a2-bacd-7acb220b0efd/setup-container/0.log" Sep 29 20:11:18 crc kubenswrapper[4792]: I0929 20:11:18.357975 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_dd9e8433-9eac-49a2-bacd-7acb220b0efd/setup-container/0.log" Sep 29 20:11:18 crc kubenswrapper[4792]: I0929 20:11:18.639377 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_dd9e8433-9eac-49a2-bacd-7acb220b0efd/rabbitmq/0.log" Sep 29 20:11:18 crc kubenswrapper[4792]: I0929 20:11:18.678528 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-25chb_3c2d29d5-9c65-4cb4-b66d-aeffaff2201f/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:18 crc kubenswrapper[4792]: I0929 20:11:18.779106 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-867d4_a44c4b7c-994b-4f5f-8b00-ca9da0a744f4/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:18 crc kubenswrapper[4792]: I0929 20:11:18.935385 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c_15519058-5c31-4b09-b9e8-68129ad2f41e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:18 crc kubenswrapper[4792]: I0929 20:11:18.996776 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-89vq8_c9424cca-92f5-490d-9a25-5feaa7010200/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.196394 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-5w5vz_352317f9-484f-4680-aea0-8ebf9c6b4e44/ssh-known-hosts-edpm-deployment/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.393869 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5dd4fd546c-9hwf9_8d8f74d0-be39-457e-ad50-c21d43cc942e/proxy-httpd/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.436780 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5dd4fd546c-9hwf9_8d8f74d0-be39-457e-ad50-c21d43cc942e/proxy-server/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.480193 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-scl6k_654442d0-5361-4c10-b60a-2eb3bcf71acd/swift-ring-rebalance/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.614401 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/account-auditor/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.692981 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/account-reaper/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.770495 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/account-replicator/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.822326 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/container-auditor/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.884461 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/account-server/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.936074 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/container-replicator/0.log" Sep 29 20:11:19 crc kubenswrapper[4792]: I0929 20:11:19.997985 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/container-updater/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.054309 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/container-server/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.100155 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/object-auditor/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.187378 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/object-expirer/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.253421 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/object-server/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.286696 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/object-replicator/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.346079 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/object-updater/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.394474 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/rsync/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.482797 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/swift-recon-cron/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.623693 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz_62dafb72-d440-48ec-af0e-46ee7e16ab5a/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.723397 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_49e8a61d-e4e3-4510-b209-7d6fb5b02e2b/tempest-tests-tempest-tests-runner/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.873260 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_33fd9eda-fc31-456e-8408-b9483c1fef79/test-operator-logs-container/0.log" Sep 29 20:11:20 crc kubenswrapper[4792]: I0929 20:11:20.929119 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-frsdd_6040c28a-468b-4253-8a8f-8fc98326b48b/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:11:41 crc 
kubenswrapper[4792]: I0929 20:11:41.959327 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:11:41 crc kubenswrapper[4792]: I0929 20:11:41.959870 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:11:52 crc kubenswrapper[4792]: I0929 20:11:52.621579 4792 generic.go:334] "Generic (PLEG): container finished" podID="71ad0279-0ae6-4ed7-b641-77b9ea7fc755" containerID="722afaab1a67daa636189f2e5a41250af1a00f693a96dc2b090736d8c97b0a41" exitCode=0 Sep 29 20:11:52 crc kubenswrapper[4792]: I0929 20:11:52.621709 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/crc-debug-pb4pn" event={"ID":"71ad0279-0ae6-4ed7-b641-77b9ea7fc755","Type":"ContainerDied","Data":"722afaab1a67daa636189f2e5a41250af1a00f693a96dc2b090736d8c97b0a41"} Sep 29 20:11:53 crc kubenswrapper[4792]: I0929 20:11:53.727638 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-pb4pn" Sep 29 20:11:53 crc kubenswrapper[4792]: I0929 20:11:53.757958 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kndnj/crc-debug-pb4pn"] Sep 29 20:11:53 crc kubenswrapper[4792]: I0929 20:11:53.764961 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kndnj/crc-debug-pb4pn"] Sep 29 20:11:53 crc kubenswrapper[4792]: I0929 20:11:53.855638 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bw76\" (UniqueName: \"kubernetes.io/projected/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-kube-api-access-7bw76\") pod \"71ad0279-0ae6-4ed7-b641-77b9ea7fc755\" (UID: \"71ad0279-0ae6-4ed7-b641-77b9ea7fc755\") " Sep 29 20:11:53 crc kubenswrapper[4792]: I0929 20:11:53.855719 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-host\") pod \"71ad0279-0ae6-4ed7-b641-77b9ea7fc755\" (UID: \"71ad0279-0ae6-4ed7-b641-77b9ea7fc755\") " Sep 29 20:11:53 crc kubenswrapper[4792]: I0929 20:11:53.855774 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-host" (OuterVolumeSpecName: "host") pod "71ad0279-0ae6-4ed7-b641-77b9ea7fc755" (UID: "71ad0279-0ae6-4ed7-b641-77b9ea7fc755"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 20:11:53 crc kubenswrapper[4792]: I0929 20:11:53.856115 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-host\") on node \"crc\" DevicePath \"\"" Sep 29 20:11:53 crc kubenswrapper[4792]: I0929 20:11:53.864321 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-kube-api-access-7bw76" (OuterVolumeSpecName: "kube-api-access-7bw76") pod "71ad0279-0ae6-4ed7-b641-77b9ea7fc755" (UID: "71ad0279-0ae6-4ed7-b641-77b9ea7fc755"). InnerVolumeSpecName "kube-api-access-7bw76". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:11:53 crc kubenswrapper[4792]: I0929 20:11:53.957470 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bw76\" (UniqueName: \"kubernetes.io/projected/71ad0279-0ae6-4ed7-b641-77b9ea7fc755-kube-api-access-7bw76\") on node \"crc\" DevicePath \"\"" Sep 29 20:11:54 crc kubenswrapper[4792]: I0929 20:11:54.639829 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a1c50a8b7a3d1c1ca4f33e6126dd695b72dc9ea1874874ae86c7e7344e31e17" Sep 29 20:11:54 crc kubenswrapper[4792]: I0929 20:11:54.639923 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-pb4pn" Sep 29 20:11:54 crc kubenswrapper[4792]: I0929 20:11:54.937648 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kndnj/crc-debug-rgqqp"] Sep 29 20:11:54 crc kubenswrapper[4792]: E0929 20:11:54.938244 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ad0279-0ae6-4ed7-b641-77b9ea7fc755" containerName="container-00" Sep 29 20:11:54 crc kubenswrapper[4792]: I0929 20:11:54.938255 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ad0279-0ae6-4ed7-b641-77b9ea7fc755" containerName="container-00" Sep 29 20:11:54 crc kubenswrapper[4792]: I0929 20:11:54.938448 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="71ad0279-0ae6-4ed7-b641-77b9ea7fc755" containerName="container-00" Sep 29 20:11:54 crc kubenswrapper[4792]: I0929 20:11:54.939156 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-rgqqp" Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.027812 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71ad0279-0ae6-4ed7-b641-77b9ea7fc755" path="/var/lib/kubelet/pods/71ad0279-0ae6-4ed7-b641-77b9ea7fc755/volumes" Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.075185 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbbxm\" (UniqueName: \"kubernetes.io/projected/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-kube-api-access-vbbxm\") pod \"crc-debug-rgqqp\" (UID: \"a1272153-fce2-424d-a9f3-a7a83f5ad4cd\") " pod="openshift-must-gather-kndnj/crc-debug-rgqqp" Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.075297 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-host\") pod \"crc-debug-rgqqp\" (UID: \"a1272153-fce2-424d-a9f3-a7a83f5ad4cd\") " pod="openshift-must-gather-kndnj/crc-debug-rgqqp" Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.176630 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbbxm\" (UniqueName: \"kubernetes.io/projected/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-kube-api-access-vbbxm\") pod \"crc-debug-rgqqp\" (UID: \"a1272153-fce2-424d-a9f3-a7a83f5ad4cd\") " pod="openshift-must-gather-kndnj/crc-debug-rgqqp" Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.176798 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-host\") pod \"crc-debug-rgqqp\" (UID: \"a1272153-fce2-424d-a9f3-a7a83f5ad4cd\") " pod="openshift-must-gather-kndnj/crc-debug-rgqqp" Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.177055 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-host\") pod \"crc-debug-rgqqp\" (UID: \"a1272153-fce2-424d-a9f3-a7a83f5ad4cd\") " pod="openshift-must-gather-kndnj/crc-debug-rgqqp" Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.193133 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbbxm\" (UniqueName: \"kubernetes.io/projected/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-kube-api-access-vbbxm\") pod \"crc-debug-rgqqp\" (UID: \"a1272153-fce2-424d-a9f3-a7a83f5ad4cd\") " pod="openshift-must-gather-kndnj/crc-debug-rgqqp" Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.269785 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-rgqqp" Sep 29 20:11:55 crc kubenswrapper[4792]: W0929 20:11:55.298155 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1272153_fce2_424d_a9f3_a7a83f5ad4cd.slice/crio-ccb2315be19eee824ce8dd2b6fc0f55ae6e38dc5d81689e59328342832097066 WatchSource:0}: Error finding container ccb2315be19eee824ce8dd2b6fc0f55ae6e38dc5d81689e59328342832097066: Status 404 returned error can't find the container with id ccb2315be19eee824ce8dd2b6fc0f55ae6e38dc5d81689e59328342832097066 Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.651872 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/crc-debug-rgqqp" event={"ID":"a1272153-fce2-424d-a9f3-a7a83f5ad4cd","Type":"ContainerStarted","Data":"cb681aab73917e6e488fadafc476b2ba5b0a8d4acb994922f68c4071342dd5a0"} Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.652193 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/crc-debug-rgqqp" event={"ID":"a1272153-fce2-424d-a9f3-a7a83f5ad4cd","Type":"ContainerStarted","Data":"ccb2315be19eee824ce8dd2b6fc0f55ae6e38dc5d81689e59328342832097066"} Sep 29 20:11:55 crc kubenswrapper[4792]: I0929 20:11:55.667608 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-kndnj/crc-debug-rgqqp" podStartSLOduration=1.6675860249999999 podStartE2EDuration="1.667586025s" podCreationTimestamp="2025-09-29 20:11:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 20:11:55.666288761 +0000 UTC m=+4527.659596187" watchObservedRunningTime="2025-09-29 20:11:55.667586025 +0000 UTC m=+4527.660893421" Sep 29 20:11:56 crc kubenswrapper[4792]: I0929 20:11:56.659296 4792 generic.go:334] "Generic (PLEG): container finished" podID="a1272153-fce2-424d-a9f3-a7a83f5ad4cd" containerID="cb681aab73917e6e488fadafc476b2ba5b0a8d4acb994922f68c4071342dd5a0" exitCode=0 Sep 29 20:11:56 crc kubenswrapper[4792]: I0929 20:11:56.659572 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/crc-debug-rgqqp" event={"ID":"a1272153-fce2-424d-a9f3-a7a83f5ad4cd","Type":"ContainerDied","Data":"cb681aab73917e6e488fadafc476b2ba5b0a8d4acb994922f68c4071342dd5a0"} Sep 29 20:11:57 crc kubenswrapper[4792]: I0929 20:11:57.763152 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-rgqqp" Sep 29 20:11:57 crc kubenswrapper[4792]: I0929 20:11:57.937542 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbbxm\" (UniqueName: \"kubernetes.io/projected/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-kube-api-access-vbbxm\") pod \"a1272153-fce2-424d-a9f3-a7a83f5ad4cd\" (UID: \"a1272153-fce2-424d-a9f3-a7a83f5ad4cd\") " Sep 29 20:11:57 crc kubenswrapper[4792]: I0929 20:11:57.937756 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-host\") pod \"a1272153-fce2-424d-a9f3-a7a83f5ad4cd\" (UID: \"a1272153-fce2-424d-a9f3-a7a83f5ad4cd\") " Sep 29 20:11:57 crc kubenswrapper[4792]: I0929 20:11:57.937918 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-host" (OuterVolumeSpecName: "host") pod "a1272153-fce2-424d-a9f3-a7a83f5ad4cd" (UID: "a1272153-fce2-424d-a9f3-a7a83f5ad4cd"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 20:11:57 crc kubenswrapper[4792]: I0929 20:11:57.938228 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-host\") on node \"crc\" DevicePath \"\"" Sep 29 20:11:57 crc kubenswrapper[4792]: I0929 20:11:57.960146 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-kube-api-access-vbbxm" (OuterVolumeSpecName: "kube-api-access-vbbxm") pod "a1272153-fce2-424d-a9f3-a7a83f5ad4cd" (UID: "a1272153-fce2-424d-a9f3-a7a83f5ad4cd"). InnerVolumeSpecName "kube-api-access-vbbxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:11:58 crc kubenswrapper[4792]: I0929 20:11:58.045800 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbbxm\" (UniqueName: \"kubernetes.io/projected/a1272153-fce2-424d-a9f3-a7a83f5ad4cd-kube-api-access-vbbxm\") on node \"crc\" DevicePath \"\"" Sep 29 20:11:58 crc kubenswrapper[4792]: I0929 20:11:58.676754 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/crc-debug-rgqqp" event={"ID":"a1272153-fce2-424d-a9f3-a7a83f5ad4cd","Type":"ContainerDied","Data":"ccb2315be19eee824ce8dd2b6fc0f55ae6e38dc5d81689e59328342832097066"} Sep 29 20:11:58 crc kubenswrapper[4792]: I0929 20:11:58.677074 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ccb2315be19eee824ce8dd2b6fc0f55ae6e38dc5d81689e59328342832097066" Sep 29 20:11:58 crc kubenswrapper[4792]: I0929 20:11:58.676804 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-rgqqp" Sep 29 20:12:03 crc kubenswrapper[4792]: I0929 20:12:03.346134 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kndnj/crc-debug-rgqqp"] Sep 29 20:12:03 crc kubenswrapper[4792]: I0929 20:12:03.352818 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kndnj/crc-debug-rgqqp"] Sep 29 20:12:04 crc kubenswrapper[4792]: I0929 20:12:04.850147 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kndnj/crc-debug-rgg6p"] Sep 29 20:12:04 crc kubenswrapper[4792]: E0929 20:12:04.850891 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1272153-fce2-424d-a9f3-a7a83f5ad4cd" containerName="container-00" Sep 29 20:12:04 crc kubenswrapper[4792]: I0929 20:12:04.850912 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1272153-fce2-424d-a9f3-a7a83f5ad4cd" containerName="container-00" Sep 29 20:12:04 crc kubenswrapper[4792]: I0929 20:12:04.851160 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1272153-fce2-424d-a9f3-a7a83f5ad4cd" containerName="container-00" Sep 29 20:12:04 crc kubenswrapper[4792]: I0929 20:12:04.851881 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-rgg6p" Sep 29 20:12:04 crc kubenswrapper[4792]: I0929 20:12:04.872020 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6khk\" (UniqueName: \"kubernetes.io/projected/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-kube-api-access-r6khk\") pod \"crc-debug-rgg6p\" (UID: \"db329c0f-ba62-46e4-88a7-fecddf8b4f2f\") " pod="openshift-must-gather-kndnj/crc-debug-rgg6p" Sep 29 20:12:04 crc kubenswrapper[4792]: I0929 20:12:04.872257 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-host\") pod \"crc-debug-rgg6p\" (UID: \"db329c0f-ba62-46e4-88a7-fecddf8b4f2f\") " pod="openshift-must-gather-kndnj/crc-debug-rgg6p" Sep 29 20:12:04 crc kubenswrapper[4792]: I0929 20:12:04.974137 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6khk\" (UniqueName: \"kubernetes.io/projected/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-kube-api-access-r6khk\") pod \"crc-debug-rgg6p\" (UID: \"db329c0f-ba62-46e4-88a7-fecddf8b4f2f\") " pod="openshift-must-gather-kndnj/crc-debug-rgg6p" Sep 29 20:12:04 crc kubenswrapper[4792]: I0929 20:12:04.974209 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-host\") pod \"crc-debug-rgg6p\" (UID: \"db329c0f-ba62-46e4-88a7-fecddf8b4f2f\") " pod="openshift-must-gather-kndnj/crc-debug-rgg6p" Sep 29 20:12:04 crc kubenswrapper[4792]: I0929 20:12:04.974410 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-host\") pod \"crc-debug-rgg6p\" (UID: \"db329c0f-ba62-46e4-88a7-fecddf8b4f2f\") " pod="openshift-must-gather-kndnj/crc-debug-rgg6p" Sep 29 20:12:05 crc kubenswrapper[4792]: I0929 20:12:05.032361 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1272153-fce2-424d-a9f3-a7a83f5ad4cd" path="/var/lib/kubelet/pods/a1272153-fce2-424d-a9f3-a7a83f5ad4cd/volumes" Sep 29 20:12:05 crc kubenswrapper[4792]: 
I0929 20:12:05.295668 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6khk\" (UniqueName: \"kubernetes.io/projected/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-kube-api-access-r6khk\") pod \"crc-debug-rgg6p\" (UID: \"db329c0f-ba62-46e4-88a7-fecddf8b4f2f\") " pod="openshift-must-gather-kndnj/crc-debug-rgg6p" Sep 29 20:12:05 crc kubenswrapper[4792]: I0929 20:12:05.472465 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-rgg6p" Sep 29 20:12:05 crc kubenswrapper[4792]: I0929 20:12:05.731700 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/crc-debug-rgg6p" event={"ID":"db329c0f-ba62-46e4-88a7-fecddf8b4f2f","Type":"ContainerStarted","Data":"5be374a29c0059fb187b01611df273654954e762a065fcb4ab77a64fcd67da31"} Sep 29 20:12:06 crc kubenswrapper[4792]: I0929 20:12:06.740094 4792 generic.go:334] "Generic (PLEG): container finished" podID="db329c0f-ba62-46e4-88a7-fecddf8b4f2f" containerID="77b144d98c68725cf8f5103452ff9a72da6009fef10481400547d9f1f921ac4c" exitCode=0 Sep 29 20:12:06 crc kubenswrapper[4792]: I0929 20:12:06.740203 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/crc-debug-rgg6p" event={"ID":"db329c0f-ba62-46e4-88a7-fecddf8b4f2f","Type":"ContainerDied","Data":"77b144d98c68725cf8f5103452ff9a72da6009fef10481400547d9f1f921ac4c"} Sep 29 20:12:06 crc kubenswrapper[4792]: I0929 20:12:06.783328 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kndnj/crc-debug-rgg6p"] Sep 29 20:12:06 crc kubenswrapper[4792]: I0929 20:12:06.792376 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kndnj/crc-debug-rgg6p"] Sep 29 20:12:07 crc kubenswrapper[4792]: I0929 20:12:07.846680 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-rgg6p" Sep 29 20:12:07 crc kubenswrapper[4792]: I0929 20:12:07.926128 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-host\") pod \"db329c0f-ba62-46e4-88a7-fecddf8b4f2f\" (UID: \"db329c0f-ba62-46e4-88a7-fecddf8b4f2f\") " Sep 29 20:12:07 crc kubenswrapper[4792]: I0929 20:12:07.926222 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6khk\" (UniqueName: \"kubernetes.io/projected/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-kube-api-access-r6khk\") pod \"db329c0f-ba62-46e4-88a7-fecddf8b4f2f\" (UID: \"db329c0f-ba62-46e4-88a7-fecddf8b4f2f\") " Sep 29 20:12:07 crc kubenswrapper[4792]: I0929 20:12:07.926306 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-host" (OuterVolumeSpecName: "host") pod "db329c0f-ba62-46e4-88a7-fecddf8b4f2f" (UID: "db329c0f-ba62-46e4-88a7-fecddf8b4f2f"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 20:12:07 crc kubenswrapper[4792]: I0929 20:12:07.926565 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-host\") on node \"crc\" DevicePath \"\"" Sep 29 20:12:07 crc kubenswrapper[4792]: I0929 20:12:07.943029 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-kube-api-access-r6khk" (OuterVolumeSpecName: "kube-api-access-r6khk") pod "db329c0f-ba62-46e4-88a7-fecddf8b4f2f" (UID: "db329c0f-ba62-46e4-88a7-fecddf8b4f2f"). InnerVolumeSpecName "kube-api-access-r6khk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:12:08 crc kubenswrapper[4792]: I0929 20:12:08.027709 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6khk\" (UniqueName: \"kubernetes.io/projected/db329c0f-ba62-46e4-88a7-fecddf8b4f2f-kube-api-access-r6khk\") on node \"crc\" DevicePath \"\"" Sep 29 20:12:08 crc kubenswrapper[4792]: I0929 20:12:08.443285 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/util/0.log" Sep 29 20:12:08 crc kubenswrapper[4792]: I0929 20:12:08.609675 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/pull/0.log" Sep 29 20:12:08 crc kubenswrapper[4792]: I0929 20:12:08.641762 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/util/0.log" Sep 29 20:12:08 crc kubenswrapper[4792]: I0929 20:12:08.709334 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/pull/0.log" Sep 29 20:12:08 crc kubenswrapper[4792]: I0929 20:12:08.757508 4792 scope.go:117] "RemoveContainer" containerID="77b144d98c68725cf8f5103452ff9a72da6009fef10481400547d9f1f921ac4c" Sep 29 20:12:08 crc kubenswrapper[4792]: I0929 20:12:08.757764 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kndnj/crc-debug-rgg6p" Sep 29 20:12:08 crc kubenswrapper[4792]: I0929 20:12:08.868773 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/util/0.log" Sep 29 20:12:08 crc kubenswrapper[4792]: I0929 20:12:08.914152 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/extract/0.log" Sep 29 20:12:08 crc kubenswrapper[4792]: I0929 20:12:08.930362 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/pull/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.024651 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db329c0f-ba62-46e4-88a7-fecddf8b4f2f" path="/var/lib/kubelet/pods/db329c0f-ba62-46e4-88a7-fecddf8b4f2f/volumes" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.078614 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-c6z65_ee957b59-f5b6-4306-b6a7-4550199fe910/kube-rbac-proxy/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.213046 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-c6z65_ee957b59-f5b6-4306-b6a7-4550199fe910/manager/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.216179 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-vjkgm_7f29d397-4b2d-4668-91f6-744e22070f30/kube-rbac-proxy/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.403090 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-vjkgm_7f29d397-4b2d-4668-91f6-744e22070f30/manager/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.430650 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-h9csw_aa102219-aaa4-46c5-b783-519972688523/manager/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.448046 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-h9csw_aa102219-aaa4-46c5-b783-519972688523/kube-rbac-proxy/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.627070 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-j2crr_bd8fdc17-d2f2-4644-8789-c8188f91ce61/kube-rbac-proxy/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.752811 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-j2crr_bd8fdc17-d2f2-4644-8789-c8188f91ce61/manager/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.763650 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-fcg79_1c191b6e-d1aa-4576-98da-db7178aed835/kube-rbac-proxy/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.859182 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-fcg79_1c191b6e-d1aa-4576-98da-db7178aed835/manager/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.936610 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-d48vc_1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e/manager/0.log" Sep 29 20:12:09 crc kubenswrapper[4792]: I0929 20:12:09.945318 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-d48vc_1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e/kube-rbac-proxy/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.168176 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-fn8fk_ca7e36bc-4aa5-414f-92a4-db59399217b9/kube-rbac-proxy/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.235946 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-fn8fk_ca7e36bc-4aa5-414f-92a4-db59399217b9/manager/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.331013 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-nfk4r_fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0/kube-rbac-proxy/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.372287 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-nfk4r_fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0/manager/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.430679 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-swj98_b9148442-b4dc-4926-920d-33c9a00172fa/kube-rbac-proxy/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.583811 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-swj98_b9148442-b4dc-4926-920d-33c9a00172fa/manager/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.605557 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-zzmf2_49160b59-f488-40f9-b23d-a3bccc3c2cb9/kube-rbac-proxy/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.643557 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-zzmf2_49160b59-f488-40f9-b23d-a3bccc3c2cb9/manager/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.791521 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-t4srt_af042430-9b25-44c8-8f30-19db90025d05/manager/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.804295 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-t4srt_af042430-9b25-44c8-8f30-19db90025d05/kube-rbac-proxy/0.log" Sep 29 20:12:10 crc kubenswrapper[4792]: I0929 20:12:10.956159 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-rrnhb_020f5851-2dbc-464b-9217-6a3cb7a737a7/kube-rbac-proxy/0.log" Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.031581 4792 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-rrnhb_020f5851-2dbc-464b-9217-6a3cb7a737a7/manager/0.log" Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.113842 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-4dfhr_12482564-55ba-46c6-857c-de815cddedc7/kube-rbac-proxy/0.log" Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.249501 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-4dfhr_12482564-55ba-46c6-857c-de815cddedc7/manager/0.log" Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.379437 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-9wbkb_dd149347-201c-4ce2-abdd-d41e57d1813a/kube-rbac-proxy/0.log" Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.384029 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-9wbkb_dd149347-201c-4ce2-abdd-d41e57d1813a/manager/0.log" Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.528702 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-44cz7_c0dd6d9d-3f07-4723-ae97-7adb0a4863b1/kube-rbac-proxy/0.log" Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.566633 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-44cz7_c0dd6d9d-3f07-4723-ae97-7adb0a4863b1/manager/0.log" Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.738298 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-57cc59b9c6-9f256_5e2b0240-3697-4ee1-9052-5e72c8bf386a/kube-rbac-proxy/0.log" Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.959435 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.959480 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:12:11 crc kubenswrapper[4792]: I0929 20:12:11.970626 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7f7c575847-m64w4_28b981e4-ec59-452c-950d-2b86f346df10/kube-rbac-proxy/0.log" Sep 29 20:12:12 crc kubenswrapper[4792]: I0929 20:12:12.188171 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7f7c575847-m64w4_28b981e4-ec59-452c-950d-2b86f346df10/operator/0.log" Sep 29 20:12:12 crc kubenswrapper[4792]: I0929 20:12:12.401024 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-9z9w7_b589f424-730a-4e38-8dfd-c1229f055e2a/registry-server/0.log" Sep 29 20:12:12 crc kubenswrapper[4792]: I0929 20:12:12.439028 4792 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-zc87x_30ffe357-8b65-4481-95f2-7b2e13fd5676/kube-rbac-proxy/0.log" Sep 29 20:12:12 crc kubenswrapper[4792]: I0929 20:12:12.769259 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-57cc59b9c6-9f256_5e2b0240-3697-4ee1-9052-5e72c8bf386a/manager/0.log" Sep 29 20:12:12 crc kubenswrapper[4792]: I0929 20:12:12.925028 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-zc87x_30ffe357-8b65-4481-95f2-7b2e13fd5676/manager/0.log" Sep 29 20:12:12 crc kubenswrapper[4792]: I0929 20:12:12.963502 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-f8qhj_18b79acc-6db2-4b4f-8f85-0b65dfd800b3/kube-rbac-proxy/0.log" Sep 29 20:12:12 crc kubenswrapper[4792]: I0929 20:12:12.972823 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-f8qhj_18b79acc-6db2-4b4f-8f85-0b65dfd800b3/manager/0.log" Sep 29 20:12:13 crc kubenswrapper[4792]: I0929 20:12:13.153831 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-vxkfn_5bc872e6-ce23-49cc-8ae7-bf92e4edda47/operator/0.log" Sep 29 20:12:13 crc kubenswrapper[4792]: I0929 20:12:13.401789 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-4p4rl_5be754f6-b295-4ca1-8f47-5a827e39580a/manager/0.log" Sep 29 20:12:13 crc kubenswrapper[4792]: I0929 20:12:13.461642 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-4p4rl_5be754f6-b295-4ca1-8f47-5a827e39580a/kube-rbac-proxy/0.log" Sep 29 20:12:13 crc kubenswrapper[4792]: I0929 20:12:13.649286 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-x5h9k_aa049eb9-e9cf-47c9-a06b-91e8c787e6c1/kube-rbac-proxy/0.log" Sep 29 20:12:13 crc kubenswrapper[4792]: I0929 20:12:13.669305 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-x5h9k_aa049eb9-e9cf-47c9-a06b-91e8c787e6c1/manager/0.log" Sep 29 20:12:13 crc kubenswrapper[4792]: I0929 20:12:13.724951 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-frkgk_cc2b4990-0306-4b03-b344-b2e186883c4c/kube-rbac-proxy/0.log" Sep 29 20:12:13 crc kubenswrapper[4792]: I0929 20:12:13.783374 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-frkgk_cc2b4990-0306-4b03-b344-b2e186883c4c/manager/0.log" Sep 29 20:12:14 crc kubenswrapper[4792]: I0929 20:12:14.175418 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-m88jp_5ae40942-75a6-41a6-877a-4070bd348d32/kube-rbac-proxy/0.log" Sep 29 20:12:14 crc kubenswrapper[4792]: I0929 20:12:14.211545 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-m88jp_5ae40942-75a6-41a6-877a-4070bd348d32/manager/0.log" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.330402 4792 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-j9nqt"] Sep 29 20:12:19 crc kubenswrapper[4792]: E0929 20:12:19.331197 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db329c0f-ba62-46e4-88a7-fecddf8b4f2f" containerName="container-00" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.331209 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="db329c0f-ba62-46e4-88a7-fecddf8b4f2f" containerName="container-00" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.331428 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="db329c0f-ba62-46e4-88a7-fecddf8b4f2f" containerName="container-00" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.332699 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.352065 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j9nqt"] Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.524311 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-utilities\") pod \"community-operators-j9nqt\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.524499 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44hzz\" (UniqueName: \"kubernetes.io/projected/47215cd5-68f8-4040-a8a8-35bc95705795-kube-api-access-44hzz\") pod \"community-operators-j9nqt\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.524531 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-catalog-content\") pod \"community-operators-j9nqt\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.626049 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44hzz\" (UniqueName: \"kubernetes.io/projected/47215cd5-68f8-4040-a8a8-35bc95705795-kube-api-access-44hzz\") pod \"community-operators-j9nqt\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.626110 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-catalog-content\") pod \"community-operators-j9nqt\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.626248 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-utilities\") pod \"community-operators-j9nqt\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.626734 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-utilities\") pod \"community-operators-j9nqt\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.627247 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-catalog-content\") pod \"community-operators-j9nqt\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.672454 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44hzz\" (UniqueName: \"kubernetes.io/projected/47215cd5-68f8-4040-a8a8-35bc95705795-kube-api-access-44hzz\") pod \"community-operators-j9nqt\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:19 crc kubenswrapper[4792]: I0929 20:12:19.959276 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:20 crc kubenswrapper[4792]: I0929 20:12:20.488311 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j9nqt"] Sep 29 20:12:20 crc kubenswrapper[4792]: I0929 20:12:20.847104 4792 generic.go:334] "Generic (PLEG): container finished" podID="47215cd5-68f8-4040-a8a8-35bc95705795" containerID="e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6" exitCode=0 Sep 29 20:12:20 crc kubenswrapper[4792]: I0929 20:12:20.847161 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j9nqt" event={"ID":"47215cd5-68f8-4040-a8a8-35bc95705795","Type":"ContainerDied","Data":"e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6"} Sep 29 20:12:20 crc kubenswrapper[4792]: I0929 20:12:20.847194 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j9nqt" event={"ID":"47215cd5-68f8-4040-a8a8-35bc95705795","Type":"ContainerStarted","Data":"b861040c6b3ac515909bd57e84e77888bb7d7e3db921ef01ec18250dd9a10062"} Sep 29 20:12:21 crc kubenswrapper[4792]: I0929 20:12:21.858221 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j9nqt" event={"ID":"47215cd5-68f8-4040-a8a8-35bc95705795","Type":"ContainerStarted","Data":"0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4"} Sep 29 20:12:22 crc kubenswrapper[4792]: I0929 20:12:22.867214 4792 generic.go:334] "Generic (PLEG): container finished" podID="47215cd5-68f8-4040-a8a8-35bc95705795" containerID="0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4" exitCode=0 Sep 29 20:12:22 crc kubenswrapper[4792]: I0929 20:12:22.867550 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j9nqt" event={"ID":"47215cd5-68f8-4040-a8a8-35bc95705795","Type":"ContainerDied","Data":"0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4"} Sep 29 20:12:23 crc kubenswrapper[4792]: I0929 20:12:23.878333 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j9nqt" 
event={"ID":"47215cd5-68f8-4040-a8a8-35bc95705795","Type":"ContainerStarted","Data":"1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472"} Sep 29 20:12:23 crc kubenswrapper[4792]: I0929 20:12:23.901892 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-j9nqt" podStartSLOduration=2.368438701 podStartE2EDuration="4.901875911s" podCreationTimestamp="2025-09-29 20:12:19 +0000 UTC" firstStartedPulling="2025-09-29 20:12:20.848654614 +0000 UTC m=+4552.841962010" lastFinishedPulling="2025-09-29 20:12:23.382091814 +0000 UTC m=+4555.375399220" observedRunningTime="2025-09-29 20:12:23.898254186 +0000 UTC m=+4555.891561602" watchObservedRunningTime="2025-09-29 20:12:23.901875911 +0000 UTC m=+4555.895183297" Sep 29 20:12:29 crc kubenswrapper[4792]: I0929 20:12:29.960365 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:29 crc kubenswrapper[4792]: I0929 20:12:29.960917 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:31 crc kubenswrapper[4792]: I0929 20:12:31.003304 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-j9nqt" podUID="47215cd5-68f8-4040-a8a8-35bc95705795" containerName="registry-server" probeResult="failure" output=< Sep 29 20:12:31 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Sep 29 20:12:31 crc kubenswrapper[4792]: > Sep 29 20:12:31 crc kubenswrapper[4792]: I0929 20:12:31.852824 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-857r4_7ac58a67-2de7-48ec-9a6c-f7cf37538bdd/control-plane-machine-set-operator/0.log" Sep 29 20:12:31 crc kubenswrapper[4792]: I0929 20:12:31.983900 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-lwr4w_39e18b3b-156d-46e5-9ace-51ee36c17614/kube-rbac-proxy/0.log" Sep 29 20:12:32 crc kubenswrapper[4792]: I0929 20:12:32.024314 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-lwr4w_39e18b3b-156d-46e5-9ace-51ee36c17614/machine-api-operator/0.log" Sep 29 20:12:40 crc kubenswrapper[4792]: I0929 20:12:40.439473 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:40 crc kubenswrapper[4792]: I0929 20:12:40.495633 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:40 crc kubenswrapper[4792]: I0929 20:12:40.670512 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j9nqt"] Sep 29 20:12:41 crc kubenswrapper[4792]: I0929 20:12:41.960090 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:12:41 crc kubenswrapper[4792]: I0929 20:12:41.961976 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:12:41 crc kubenswrapper[4792]: I0929 20:12:41.962102 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 20:12:41 crc kubenswrapper[4792]: I0929 20:12:41.963006 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 20:12:41 crc kubenswrapper[4792]: I0929 20:12:41.963147 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" gracePeriod=600 Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.065808 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-j9nqt" podUID="47215cd5-68f8-4040-a8a8-35bc95705795" containerName="registry-server" containerID="cri-o://1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472" gracePeriod=2 Sep 29 20:12:42 crc kubenswrapper[4792]: E0929 20:12:42.088570 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.607025 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.792984 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44hzz\" (UniqueName: \"kubernetes.io/projected/47215cd5-68f8-4040-a8a8-35bc95705795-kube-api-access-44hzz\") pod \"47215cd5-68f8-4040-a8a8-35bc95705795\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.793084 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-utilities\") pod \"47215cd5-68f8-4040-a8a8-35bc95705795\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.793243 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-catalog-content\") pod \"47215cd5-68f8-4040-a8a8-35bc95705795\" (UID: \"47215cd5-68f8-4040-a8a8-35bc95705795\") " Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.793743 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-utilities" (OuterVolumeSpecName: "utilities") pod "47215cd5-68f8-4040-a8a8-35bc95705795" (UID: "47215cd5-68f8-4040-a8a8-35bc95705795"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.805068 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47215cd5-68f8-4040-a8a8-35bc95705795-kube-api-access-44hzz" (OuterVolumeSpecName: "kube-api-access-44hzz") pod "47215cd5-68f8-4040-a8a8-35bc95705795" (UID: "47215cd5-68f8-4040-a8a8-35bc95705795"). InnerVolumeSpecName "kube-api-access-44hzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.852389 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47215cd5-68f8-4040-a8a8-35bc95705795" (UID: "47215cd5-68f8-4040-a8a8-35bc95705795"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.895333 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.895373 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47215cd5-68f8-4040-a8a8-35bc95705795-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:12:42 crc kubenswrapper[4792]: I0929 20:12:42.895388 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44hzz\" (UniqueName: \"kubernetes.io/projected/47215cd5-68f8-4040-a8a8-35bc95705795-kube-api-access-44hzz\") on node \"crc\" DevicePath \"\"" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.076072 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" exitCode=0 Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.076120 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"} Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.076485 4792 scope.go:117] "RemoveContainer" containerID="edbe2c47c6cb56af81ddbfffa93aad15dfa1cbd4b7f9ffe576a99abe2914aced" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.077116 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:12:43 crc kubenswrapper[4792]: E0929 20:12:43.077363 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.079590 4792 generic.go:334] "Generic (PLEG): container finished" podID="47215cd5-68f8-4040-a8a8-35bc95705795" containerID="1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472" exitCode=0 Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.079629 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j9nqt" event={"ID":"47215cd5-68f8-4040-a8a8-35bc95705795","Type":"ContainerDied","Data":"1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472"} Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.079657 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j9nqt" event={"ID":"47215cd5-68f8-4040-a8a8-35bc95705795","Type":"ContainerDied","Data":"b861040c6b3ac515909bd57e84e77888bb7d7e3db921ef01ec18250dd9a10062"} Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.079726 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j9nqt" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.166425 4792 scope.go:117] "RemoveContainer" containerID="1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.167664 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j9nqt"] Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.183781 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-j9nqt"] Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.190667 4792 scope.go:117] "RemoveContainer" containerID="0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.219831 4792 scope.go:117] "RemoveContainer" containerID="e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.256401 4792 scope.go:117] "RemoveContainer" containerID="1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472" Sep 29 20:12:43 crc kubenswrapper[4792]: E0929 20:12:43.257227 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472\": container with ID starting with 1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472 not found: ID does not exist" containerID="1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.257382 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472"} err="failed to get container status \"1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472\": rpc error: code = NotFound desc = could not find container \"1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472\": container with ID starting with 1b9811fc4ab6eb10fdf7c9cfafdadc4d0d5e808f609c0d9087dc69aabc798472 not found: ID does not exist" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.257483 4792 scope.go:117] "RemoveContainer" containerID="0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4" Sep 29 20:12:43 crc kubenswrapper[4792]: E0929 20:12:43.257904 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4\": container with ID starting with 0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4 not found: ID does not exist" containerID="0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.257981 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4"} err="failed to get container status \"0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4\": rpc error: code = NotFound desc = could not find container \"0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4\": container with ID starting with 0ac355954d13fef65746eab40d37c48ad93d46f6576c3c80bea286d64753c3b4 not found: ID does not exist" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.258050 4792 scope.go:117] "RemoveContainer" 
containerID="e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6" Sep 29 20:12:43 crc kubenswrapper[4792]: E0929 20:12:43.258443 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6\": container with ID starting with e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6 not found: ID does not exist" containerID="e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6" Sep 29 20:12:43 crc kubenswrapper[4792]: I0929 20:12:43.258533 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6"} err="failed to get container status \"e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6\": rpc error: code = NotFound desc = could not find container \"e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6\": container with ID starting with e04cccccf7276fd473b4f676f587c877dceaf1fcafcc9de462355b3f7bbae3e6 not found: ID does not exist" Sep 29 20:12:44 crc kubenswrapper[4792]: I0929 20:12:44.966746 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-sb29r_d603ca4a-b40c-439f-b7ed-09a279e9d727/cert-manager-controller/0.log" Sep 29 20:12:45 crc kubenswrapper[4792]: I0929 20:12:45.024347 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47215cd5-68f8-4040-a8a8-35bc95705795" path="/var/lib/kubelet/pods/47215cd5-68f8-4040-a8a8-35bc95705795/volumes" Sep 29 20:12:45 crc kubenswrapper[4792]: I0929 20:12:45.056599 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-pxgvr_4bdb9002-6a61-4c32-a32e-3a76cc24a38e/cert-manager-cainjector/0.log" Sep 29 20:12:45 crc kubenswrapper[4792]: I0929 20:12:45.123804 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-mx55q_b48f55d3-c9a5-4973-b233-f59ced6a17e6/cert-manager-webhook/0.log" Sep 29 20:12:55 crc kubenswrapper[4792]: I0929 20:12:55.016010 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:12:55 crc kubenswrapper[4792]: E0929 20:12:55.016700 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:12:58 crc kubenswrapper[4792]: I0929 20:12:58.473278 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-ps4tp_40d01e5b-1274-48a1-8510-4386dd7150bb/nmstate-console-plugin/0.log" Sep 29 20:12:58 crc kubenswrapper[4792]: I0929 20:12:58.621535 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-w7wwz_7d8ab4ec-b506-4549-be62-9b914b9cb3f3/nmstate-handler/0.log" Sep 29 20:12:58 crc kubenswrapper[4792]: I0929 20:12:58.748477 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-kjrzl_a1f9e458-fc58-4f84-89fc-9196c747d6ba/kube-rbac-proxy/0.log" Sep 29 20:12:58 crc kubenswrapper[4792]: I0929 
20:12:58.754475 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-kjrzl_a1f9e458-fc58-4f84-89fc-9196c747d6ba/nmstate-metrics/0.log" Sep 29 20:12:58 crc kubenswrapper[4792]: I0929 20:12:58.919280 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-qr5w9_6a491845-f4ef-4f82-b716-d46be2982350/nmstate-operator/0.log" Sep 29 20:12:59 crc kubenswrapper[4792]: I0929 20:12:59.005734 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-9gcgx_66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef/nmstate-webhook/0.log" Sep 29 20:13:07 crc kubenswrapper[4792]: I0929 20:13:07.016105 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:13:07 crc kubenswrapper[4792]: E0929 20:13:07.016723 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:13:13 crc kubenswrapper[4792]: I0929 20:13:13.670243 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-kjkvm_af42de4e-dfaa-4178-b742-d4388d56b58a/kube-rbac-proxy/0.log" Sep 29 20:13:13 crc kubenswrapper[4792]: I0929 20:13:13.781498 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-kjkvm_af42de4e-dfaa-4178-b742-d4388d56b58a/controller/0.log" Sep 29 20:13:13 crc kubenswrapper[4792]: I0929 20:13:13.921104 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-fw449_e4991702-0228-4b1e-abc9-01d614664746/frr-k8s-webhook-server/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.012474 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-frr-files/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.209417 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-reloader/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.237371 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-reloader/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.250115 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-metrics/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.272790 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-frr-files/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.414385 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-frr-files/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.425096 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-reloader/0.log" Sep 29 20:13:14 crc 
kubenswrapper[4792]: I0929 20:13:14.462890 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-metrics/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.491866 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-metrics/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.650872 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-frr-files/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.693786 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-metrics/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.698244 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/controller/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.699445 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-reloader/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.910670 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/frr-metrics/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.938183 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/kube-rbac-proxy/0.log" Sep 29 20:13:14 crc kubenswrapper[4792]: I0929 20:13:14.938498 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/kube-rbac-proxy-frr/0.log" Sep 29 20:13:15 crc kubenswrapper[4792]: I0929 20:13:15.148753 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/reloader/0.log" Sep 29 20:13:15 crc kubenswrapper[4792]: I0929 20:13:15.340751 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-798fbb9bbf-rvlk8_f81e5f81-aa7e-4c65-900e-9a5929ca038b/manager/0.log" Sep 29 20:13:15 crc kubenswrapper[4792]: I0929 20:13:15.559324 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5888bf57-l785s_19a31c23-7b44-4b0e-a627-1891480c5e03/webhook-server/0.log" Sep 29 20:13:15 crc kubenswrapper[4792]: I0929 20:13:15.656639 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8csps_c8d5189a-0868-46dc-881c-077f4d5be810/kube-rbac-proxy/0.log" Sep 29 20:13:16 crc kubenswrapper[4792]: I0929 20:13:16.182772 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/frr/0.log" Sep 29 20:13:16 crc kubenswrapper[4792]: I0929 20:13:16.257289 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8csps_c8d5189a-0868-46dc-881c-077f4d5be810/speaker/0.log" Sep 29 20:13:18 crc kubenswrapper[4792]: I0929 20:13:18.015227 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:13:18 crc kubenswrapper[4792]: E0929 20:13:18.015741 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:13:28 crc kubenswrapper[4792]: I0929 20:13:28.967543 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/util/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.132511 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/pull/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.149340 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/util/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.218574 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/pull/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.339721 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/extract/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.366150 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/util/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.368239 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/pull/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.558455 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-utilities/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.701126 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-content/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.713770 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-utilities/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.742331 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-content/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.924376 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-utilities/0.log" Sep 29 20:13:29 crc kubenswrapper[4792]: I0929 20:13:29.935625 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-content/0.log" Sep 29 20:13:30 crc kubenswrapper[4792]: I0929 20:13:30.231162 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-utilities/0.log" Sep 29 20:13:30 crc kubenswrapper[4792]: I0929 20:13:30.536474 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/registry-server/0.log" Sep 29 20:13:30 crc kubenswrapper[4792]: I0929 20:13:30.661239 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-utilities/0.log" Sep 29 20:13:30 crc kubenswrapper[4792]: I0929 20:13:30.680032 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-content/0.log" Sep 29 20:13:30 crc kubenswrapper[4792]: I0929 20:13:30.684724 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-content/0.log" Sep 29 20:13:30 crc kubenswrapper[4792]: I0929 20:13:30.944649 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-utilities/0.log" Sep 29 20:13:30 crc kubenswrapper[4792]: I0929 20:13:30.991422 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-content/0.log" Sep 29 20:13:31 crc kubenswrapper[4792]: I0929 20:13:31.210376 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/util/0.log" Sep 29 20:13:31 crc kubenswrapper[4792]: I0929 20:13:31.469218 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/util/0.log" Sep 29 20:13:31 crc kubenswrapper[4792]: I0929 20:13:31.489841 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/pull/0.log" Sep 29 20:13:31 crc kubenswrapper[4792]: I0929 20:13:31.561643 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/registry-server/0.log" Sep 29 20:13:31 crc kubenswrapper[4792]: I0929 20:13:31.586780 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/pull/0.log" Sep 29 20:13:31 crc kubenswrapper[4792]: I0929 20:13:31.703628 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/util/0.log" Sep 29 20:13:31 crc kubenswrapper[4792]: I0929 20:13:31.758617 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/extract/0.log" Sep 29 20:13:31 crc kubenswrapper[4792]: I0929 20:13:31.768948 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/pull/0.log" Sep 29 20:13:31 crc kubenswrapper[4792]: I0929 20:13:31.896246 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mjdnk_7ae10600-0f4b-4b98-b304-a13cb5283d63/marketplace-operator/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.000234 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-utilities/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.138404 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-utilities/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.181901 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-content/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.210297 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-content/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.375961 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-utilities/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.442772 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-content/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.511309 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-utilities/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.520072 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/registry-server/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.703981 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-utilities/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.706188 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-content/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.710073 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-content/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.897203 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-utilities/0.log" Sep 29 20:13:32 crc kubenswrapper[4792]: I0929 20:13:32.950492 4792 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-content/0.log" Sep 29 20:13:33 crc kubenswrapper[4792]: I0929 20:13:33.014878 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:13:33 crc kubenswrapper[4792]: E0929 20:13:33.015101 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:13:33 crc kubenswrapper[4792]: I0929 20:13:33.488082 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/registry-server/0.log" Sep 29 20:13:44 crc kubenswrapper[4792]: I0929 20:13:44.016081 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:13:44 crc kubenswrapper[4792]: E0929 20:13:44.016730 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:13:57 crc kubenswrapper[4792]: I0929 20:13:57.016001 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:13:57 crc kubenswrapper[4792]: E0929 20:13:57.016726 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.100276 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h5zvx"] Sep 29 20:14:05 crc kubenswrapper[4792]: E0929 20:14:05.101113 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47215cd5-68f8-4040-a8a8-35bc95705795" containerName="extract-content" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.101128 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="47215cd5-68f8-4040-a8a8-35bc95705795" containerName="extract-content" Sep 29 20:14:05 crc kubenswrapper[4792]: E0929 20:14:05.101142 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47215cd5-68f8-4040-a8a8-35bc95705795" containerName="extract-utilities" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.101150 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="47215cd5-68f8-4040-a8a8-35bc95705795" containerName="extract-utilities" Sep 29 20:14:05 crc kubenswrapper[4792]: E0929 20:14:05.101161 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47215cd5-68f8-4040-a8a8-35bc95705795" 
containerName="registry-server" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.101167 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="47215cd5-68f8-4040-a8a8-35bc95705795" containerName="registry-server" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.101341 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="47215cd5-68f8-4040-a8a8-35bc95705795" containerName="registry-server" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.102605 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.112123 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5zvx"] Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.188645 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-catalog-content\") pod \"redhat-marketplace-h5zvx\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.188689 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6jbc\" (UniqueName: \"kubernetes.io/projected/9792abaf-b40e-4b73-99e0-75a7fab9f72c-kube-api-access-f6jbc\") pod \"redhat-marketplace-h5zvx\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.188795 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-utilities\") pod \"redhat-marketplace-h5zvx\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.290823 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-utilities\") pod \"redhat-marketplace-h5zvx\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.290932 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-catalog-content\") pod \"redhat-marketplace-h5zvx\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.290958 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6jbc\" (UniqueName: \"kubernetes.io/projected/9792abaf-b40e-4b73-99e0-75a7fab9f72c-kube-api-access-f6jbc\") pod \"redhat-marketplace-h5zvx\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.291664 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-utilities\") pod \"redhat-marketplace-h5zvx\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " 
pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.291900 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-catalog-content\") pod \"redhat-marketplace-h5zvx\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.313430 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6jbc\" (UniqueName: \"kubernetes.io/projected/9792abaf-b40e-4b73-99e0-75a7fab9f72c-kube-api-access-f6jbc\") pod \"redhat-marketplace-h5zvx\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:05 crc kubenswrapper[4792]: I0929 20:14:05.469048 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:06 crc kubenswrapper[4792]: I0929 20:14:06.013874 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5zvx"] Sep 29 20:14:06 crc kubenswrapper[4792]: I0929 20:14:06.876302 4792 generic.go:334] "Generic (PLEG): container finished" podID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerID="0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1" exitCode=0 Sep 29 20:14:06 crc kubenswrapper[4792]: I0929 20:14:06.876480 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5zvx" event={"ID":"9792abaf-b40e-4b73-99e0-75a7fab9f72c","Type":"ContainerDied","Data":"0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1"} Sep 29 20:14:06 crc kubenswrapper[4792]: I0929 20:14:06.876735 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5zvx" event={"ID":"9792abaf-b40e-4b73-99e0-75a7fab9f72c","Type":"ContainerStarted","Data":"359d5053bd6bec21d8c3509131e797d0fcccb1c1979fe069e7709321030fd720"} Sep 29 20:14:06 crc kubenswrapper[4792]: I0929 20:14:06.878687 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 20:14:08 crc kubenswrapper[4792]: I0929 20:14:08.896329 4792 generic.go:334] "Generic (PLEG): container finished" podID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerID="1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311" exitCode=0 Sep 29 20:14:08 crc kubenswrapper[4792]: I0929 20:14:08.896375 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5zvx" event={"ID":"9792abaf-b40e-4b73-99e0-75a7fab9f72c","Type":"ContainerDied","Data":"1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311"} Sep 29 20:14:10 crc kubenswrapper[4792]: I0929 20:14:10.015576 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:14:10 crc kubenswrapper[4792]: E0929 20:14:10.017404 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:14:10 crc kubenswrapper[4792]: 
I0929 20:14:10.919330 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5zvx" event={"ID":"9792abaf-b40e-4b73-99e0-75a7fab9f72c","Type":"ContainerStarted","Data":"d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85"} Sep 29 20:14:10 crc kubenswrapper[4792]: I0929 20:14:10.942038 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h5zvx" podStartSLOduration=3.511295151 podStartE2EDuration="5.942012718s" podCreationTimestamp="2025-09-29 20:14:05 +0000 UTC" firstStartedPulling="2025-09-29 20:14:06.878396152 +0000 UTC m=+4658.871703548" lastFinishedPulling="2025-09-29 20:14:09.309113719 +0000 UTC m=+4661.302421115" observedRunningTime="2025-09-29 20:14:10.937699865 +0000 UTC m=+4662.931007261" watchObservedRunningTime="2025-09-29 20:14:10.942012718 +0000 UTC m=+4662.935320114" Sep 29 20:14:15 crc kubenswrapper[4792]: I0929 20:14:15.469708 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:15 crc kubenswrapper[4792]: I0929 20:14:15.470301 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:15 crc kubenswrapper[4792]: I0929 20:14:15.519243 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:16 crc kubenswrapper[4792]: I0929 20:14:16.011698 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:16 crc kubenswrapper[4792]: I0929 20:14:16.068713 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5zvx"] Sep 29 20:14:17 crc kubenswrapper[4792]: I0929 20:14:17.978461 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h5zvx" podUID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerName="registry-server" containerID="cri-o://d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85" gracePeriod=2 Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.420691 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.522235 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-utilities\") pod \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.522416 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-catalog-content\") pod \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.522473 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6jbc\" (UniqueName: \"kubernetes.io/projected/9792abaf-b40e-4b73-99e0-75a7fab9f72c-kube-api-access-f6jbc\") pod \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\" (UID: \"9792abaf-b40e-4b73-99e0-75a7fab9f72c\") " Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.523121 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-utilities" (OuterVolumeSpecName: "utilities") pod "9792abaf-b40e-4b73-99e0-75a7fab9f72c" (UID: "9792abaf-b40e-4b73-99e0-75a7fab9f72c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.523807 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.534390 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9792abaf-b40e-4b73-99e0-75a7fab9f72c" (UID: "9792abaf-b40e-4b73-99e0-75a7fab9f72c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.535022 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9792abaf-b40e-4b73-99e0-75a7fab9f72c-kube-api-access-f6jbc" (OuterVolumeSpecName: "kube-api-access-f6jbc") pod "9792abaf-b40e-4b73-99e0-75a7fab9f72c" (UID: "9792abaf-b40e-4b73-99e0-75a7fab9f72c"). InnerVolumeSpecName "kube-api-access-f6jbc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.625059 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6jbc\" (UniqueName: \"kubernetes.io/projected/9792abaf-b40e-4b73-99e0-75a7fab9f72c-kube-api-access-f6jbc\") on node \"crc\" DevicePath \"\"" Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.625337 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9792abaf-b40e-4b73-99e0-75a7fab9f72c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.990642 4792 generic.go:334] "Generic (PLEG): container finished" podID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerID="d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85" exitCode=0 Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.990675 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5zvx" Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.990693 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5zvx" event={"ID":"9792abaf-b40e-4b73-99e0-75a7fab9f72c","Type":"ContainerDied","Data":"d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85"} Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.990730 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5zvx" event={"ID":"9792abaf-b40e-4b73-99e0-75a7fab9f72c","Type":"ContainerDied","Data":"359d5053bd6bec21d8c3509131e797d0fcccb1c1979fe069e7709321030fd720"} Sep 29 20:14:18 crc kubenswrapper[4792]: I0929 20:14:18.990746 4792 scope.go:117] "RemoveContainer" containerID="d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85" Sep 29 20:14:19 crc kubenswrapper[4792]: I0929 20:14:19.015162 4792 scope.go:117] "RemoveContainer" containerID="1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311" Sep 29 20:14:19 crc kubenswrapper[4792]: I0929 20:14:19.051759 4792 scope.go:117] "RemoveContainer" containerID="0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1" Sep 29 20:14:19 crc kubenswrapper[4792]: I0929 20:14:19.051945 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5zvx"] Sep 29 20:14:19 crc kubenswrapper[4792]: I0929 20:14:19.064434 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5zvx"] Sep 29 20:14:19 crc kubenswrapper[4792]: I0929 20:14:19.096247 4792 scope.go:117] "RemoveContainer" containerID="d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85" Sep 29 20:14:19 crc kubenswrapper[4792]: E0929 20:14:19.096698 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85\": container with ID starting with d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85 not found: ID does not exist" containerID="d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85" Sep 29 20:14:19 crc kubenswrapper[4792]: I0929 20:14:19.096762 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85"} err="failed to get container status 
\"d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85\": rpc error: code = NotFound desc = could not find container \"d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85\": container with ID starting with d76c1b838ff06ef7edc2788a3c81ccdfc1bef2eec1981917882b8a0483a3aa85 not found: ID does not exist" Sep 29 20:14:19 crc kubenswrapper[4792]: I0929 20:14:19.096787 4792 scope.go:117] "RemoveContainer" containerID="1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311" Sep 29 20:14:19 crc kubenswrapper[4792]: E0929 20:14:19.097243 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311\": container with ID starting with 1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311 not found: ID does not exist" containerID="1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311" Sep 29 20:14:19 crc kubenswrapper[4792]: I0929 20:14:19.097333 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311"} err="failed to get container status \"1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311\": rpc error: code = NotFound desc = could not find container \"1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311\": container with ID starting with 1748fb92b0735da56d6d9cb9047216e9fcd5e51083429e456350fbb83b9cf311 not found: ID does not exist" Sep 29 20:14:19 crc kubenswrapper[4792]: I0929 20:14:19.097417 4792 scope.go:117] "RemoveContainer" containerID="0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1" Sep 29 20:14:19 crc kubenswrapper[4792]: E0929 20:14:19.097819 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1\": container with ID starting with 0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1 not found: ID does not exist" containerID="0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1" Sep 29 20:14:19 crc kubenswrapper[4792]: I0929 20:14:19.097890 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1"} err="failed to get container status \"0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1\": rpc error: code = NotFound desc = could not find container \"0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1\": container with ID starting with 0f2754c0e51530d93cefc239d09a987efd83d343f5b36430cd84e7b25582ffb1 not found: ID does not exist" Sep 29 20:14:21 crc kubenswrapper[4792]: I0929 20:14:21.026334 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" path="/var/lib/kubelet/pods/9792abaf-b40e-4b73-99e0-75a7fab9f72c/volumes" Sep 29 20:14:25 crc kubenswrapper[4792]: I0929 20:14:25.015807 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:14:25 crc kubenswrapper[4792]: E0929 20:14:25.016483 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:14:37 crc kubenswrapper[4792]: I0929 20:14:37.018499 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:14:37 crc kubenswrapper[4792]: E0929 20:14:37.019324 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:14:48 crc kubenswrapper[4792]: I0929 20:14:48.015234 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:14:48 crc kubenswrapper[4792]: E0929 20:14:48.015868 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.464678 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l4kkd"] Sep 29 20:14:54 crc kubenswrapper[4792]: E0929 20:14:54.465559 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerName="extract-content" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.465582 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerName="extract-content" Sep 29 20:14:54 crc kubenswrapper[4792]: E0929 20:14:54.465626 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerName="registry-server" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.465636 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerName="registry-server" Sep 29 20:14:54 crc kubenswrapper[4792]: E0929 20:14:54.465662 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerName="extract-utilities" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.465673 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerName="extract-utilities" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.466020 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="9792abaf-b40e-4b73-99e0-75a7fab9f72c" containerName="registry-server" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.475897 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.478540 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l4kkd"] Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.628165 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-utilities\") pod \"certified-operators-l4kkd\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.628464 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-catalog-content\") pod \"certified-operators-l4kkd\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.628544 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ww94\" (UniqueName: \"kubernetes.io/projected/537039de-0fde-4bf4-806a-06568653d14d-kube-api-access-4ww94\") pod \"certified-operators-l4kkd\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.729969 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-catalog-content\") pod \"certified-operators-l4kkd\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.730246 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ww94\" (UniqueName: \"kubernetes.io/projected/537039de-0fde-4bf4-806a-06568653d14d-kube-api-access-4ww94\") pod \"certified-operators-l4kkd\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.730437 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-utilities\") pod \"certified-operators-l4kkd\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.730956 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-utilities\") pod \"certified-operators-l4kkd\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.731225 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-catalog-content\") pod \"certified-operators-l4kkd\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.761487 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4ww94\" (UniqueName: \"kubernetes.io/projected/537039de-0fde-4bf4-806a-06568653d14d-kube-api-access-4ww94\") pod \"certified-operators-l4kkd\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:54 crc kubenswrapper[4792]: I0929 20:14:54.802612 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:14:55 crc kubenswrapper[4792]: I0929 20:14:55.196121 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l4kkd"] Sep 29 20:14:55 crc kubenswrapper[4792]: I0929 20:14:55.376486 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4kkd" event={"ID":"537039de-0fde-4bf4-806a-06568653d14d","Type":"ContainerStarted","Data":"bbd8732e0a93d1cb6d701ff59823a7ad81aa76cf539527bd29fd88e53aaf24c8"} Sep 29 20:14:56 crc kubenswrapper[4792]: I0929 20:14:56.390760 4792 generic.go:334] "Generic (PLEG): container finished" podID="537039de-0fde-4bf4-806a-06568653d14d" containerID="f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516" exitCode=0 Sep 29 20:14:56 crc kubenswrapper[4792]: I0929 20:14:56.390942 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4kkd" event={"ID":"537039de-0fde-4bf4-806a-06568653d14d","Type":"ContainerDied","Data":"f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516"} Sep 29 20:14:57 crc kubenswrapper[4792]: I0929 20:14:57.401556 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4kkd" event={"ID":"537039de-0fde-4bf4-806a-06568653d14d","Type":"ContainerStarted","Data":"337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd"} Sep 29 20:14:58 crc kubenswrapper[4792]: I0929 20:14:58.415785 4792 generic.go:334] "Generic (PLEG): container finished" podID="537039de-0fde-4bf4-806a-06568653d14d" containerID="337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd" exitCode=0 Sep 29 20:14:58 crc kubenswrapper[4792]: I0929 20:14:58.416161 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4kkd" event={"ID":"537039de-0fde-4bf4-806a-06568653d14d","Type":"ContainerDied","Data":"337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd"} Sep 29 20:14:59 crc kubenswrapper[4792]: I0929 20:14:59.432124 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4kkd" event={"ID":"537039de-0fde-4bf4-806a-06568653d14d","Type":"ContainerStarted","Data":"3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174"} Sep 29 20:14:59 crc kubenswrapper[4792]: I0929 20:14:59.459550 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l4kkd" podStartSLOduration=2.860468775 podStartE2EDuration="5.459525777s" podCreationTimestamp="2025-09-29 20:14:54 +0000 UTC" firstStartedPulling="2025-09-29 20:14:56.393233291 +0000 UTC m=+4708.386540727" lastFinishedPulling="2025-09-29 20:14:58.992290323 +0000 UTC m=+4710.985597729" observedRunningTime="2025-09-29 20:14:59.456060347 +0000 UTC m=+4711.449367743" watchObservedRunningTime="2025-09-29 20:14:59.459525777 +0000 UTC m=+4711.452833183" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.015120 4792 scope.go:117] "RemoveContainer" 
containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:15:00 crc kubenswrapper[4792]: E0929 20:15:00.015590 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.160005 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp"] Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.162404 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.167296 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.171378 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp"] Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.172960 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.245613 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/af974c14-099b-4120-a5d9-7ab25fce6282-secret-volume\") pod \"collect-profiles-29319615-cfwmp\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.245935 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/af974c14-099b-4120-a5d9-7ab25fce6282-config-volume\") pod \"collect-profiles-29319615-cfwmp\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.246051 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2f25n\" (UniqueName: \"kubernetes.io/projected/af974c14-099b-4120-a5d9-7ab25fce6282-kube-api-access-2f25n\") pod \"collect-profiles-29319615-cfwmp\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.347674 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/af974c14-099b-4120-a5d9-7ab25fce6282-config-volume\") pod \"collect-profiles-29319615-cfwmp\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.347754 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2f25n\" (UniqueName: 
\"kubernetes.io/projected/af974c14-099b-4120-a5d9-7ab25fce6282-kube-api-access-2f25n\") pod \"collect-profiles-29319615-cfwmp\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.347869 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/af974c14-099b-4120-a5d9-7ab25fce6282-secret-volume\") pod \"collect-profiles-29319615-cfwmp\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.349669 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/af974c14-099b-4120-a5d9-7ab25fce6282-config-volume\") pod \"collect-profiles-29319615-cfwmp\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.361578 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/af974c14-099b-4120-a5d9-7ab25fce6282-secret-volume\") pod \"collect-profiles-29319615-cfwmp\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.365575 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2f25n\" (UniqueName: \"kubernetes.io/projected/af974c14-099b-4120-a5d9-7ab25fce6282-kube-api-access-2f25n\") pod \"collect-profiles-29319615-cfwmp\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.482888 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:00 crc kubenswrapper[4792]: I0929 20:15:00.937229 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp"] Sep 29 20:15:00 crc kubenswrapper[4792]: W0929 20:15:00.937688 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf974c14_099b_4120_a5d9_7ab25fce6282.slice/crio-0bc010b4f48660d19acf3fbceb950c3ed78e9c4ff41628f0e3f810a8a2e97f6c WatchSource:0}: Error finding container 0bc010b4f48660d19acf3fbceb950c3ed78e9c4ff41628f0e3f810a8a2e97f6c: Status 404 returned error can't find the container with id 0bc010b4f48660d19acf3fbceb950c3ed78e9c4ff41628f0e3f810a8a2e97f6c Sep 29 20:15:01 crc kubenswrapper[4792]: I0929 20:15:01.447268 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" event={"ID":"af974c14-099b-4120-a5d9-7ab25fce6282","Type":"ContainerStarted","Data":"2334a84ee678ccbaa1e9970bd4e2beb3a40109aea9a25a2326475e279194eb9b"} Sep 29 20:15:01 crc kubenswrapper[4792]: I0929 20:15:01.447550 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" event={"ID":"af974c14-099b-4120-a5d9-7ab25fce6282","Type":"ContainerStarted","Data":"0bc010b4f48660d19acf3fbceb950c3ed78e9c4ff41628f0e3f810a8a2e97f6c"} Sep 29 20:15:02 crc kubenswrapper[4792]: I0929 20:15:02.459053 4792 generic.go:334] "Generic (PLEG): container finished" podID="af974c14-099b-4120-a5d9-7ab25fce6282" containerID="2334a84ee678ccbaa1e9970bd4e2beb3a40109aea9a25a2326475e279194eb9b" exitCode=0 Sep 29 20:15:02 crc kubenswrapper[4792]: I0929 20:15:02.459155 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" event={"ID":"af974c14-099b-4120-a5d9-7ab25fce6282","Type":"ContainerDied","Data":"2334a84ee678ccbaa1e9970bd4e2beb3a40109aea9a25a2326475e279194eb9b"} Sep 29 20:15:03 crc kubenswrapper[4792]: I0929 20:15:03.830645 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:03 crc kubenswrapper[4792]: I0929 20:15:03.923694 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2f25n\" (UniqueName: \"kubernetes.io/projected/af974c14-099b-4120-a5d9-7ab25fce6282-kube-api-access-2f25n\") pod \"af974c14-099b-4120-a5d9-7ab25fce6282\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " Sep 29 20:15:03 crc kubenswrapper[4792]: I0929 20:15:03.924022 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/af974c14-099b-4120-a5d9-7ab25fce6282-secret-volume\") pod \"af974c14-099b-4120-a5d9-7ab25fce6282\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " Sep 29 20:15:03 crc kubenswrapper[4792]: I0929 20:15:03.924097 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/af974c14-099b-4120-a5d9-7ab25fce6282-config-volume\") pod \"af974c14-099b-4120-a5d9-7ab25fce6282\" (UID: \"af974c14-099b-4120-a5d9-7ab25fce6282\") " Sep 29 20:15:03 crc kubenswrapper[4792]: I0929 20:15:03.925472 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af974c14-099b-4120-a5d9-7ab25fce6282-config-volume" (OuterVolumeSpecName: "config-volume") pod "af974c14-099b-4120-a5d9-7ab25fce6282" (UID: "af974c14-099b-4120-a5d9-7ab25fce6282"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.026443 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/af974c14-099b-4120-a5d9-7ab25fce6282-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.393614 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af974c14-099b-4120-a5d9-7ab25fce6282-kube-api-access-2f25n" (OuterVolumeSpecName: "kube-api-access-2f25n") pod "af974c14-099b-4120-a5d9-7ab25fce6282" (UID: "af974c14-099b-4120-a5d9-7ab25fce6282"). InnerVolumeSpecName "kube-api-access-2f25n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.394299 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af974c14-099b-4120-a5d9-7ab25fce6282-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "af974c14-099b-4120-a5d9-7ab25fce6282" (UID: "af974c14-099b-4120-a5d9-7ab25fce6282"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.435092 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2f25n\" (UniqueName: \"kubernetes.io/projected/af974c14-099b-4120-a5d9-7ab25fce6282-kube-api-access-2f25n\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.435118 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/af974c14-099b-4120-a5d9-7ab25fce6282-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.482213 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" event={"ID":"af974c14-099b-4120-a5d9-7ab25fce6282","Type":"ContainerDied","Data":"0bc010b4f48660d19acf3fbceb950c3ed78e9c4ff41628f0e3f810a8a2e97f6c"} Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.482249 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bc010b4f48660d19acf3fbceb950c3ed78e9c4ff41628f0e3f810a8a2e97f6c" Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.482377 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-cfwmp" Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.559086 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd"] Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.568710 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319570-dc9rd"] Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.803473 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.805235 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:15:04 crc kubenswrapper[4792]: I0929 20:15:04.850598 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:15:05 crc kubenswrapper[4792]: I0929 20:15:05.027139 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="584d7b6c-450e-4739-91c4-6a89c1ab2487" path="/var/lib/kubelet/pods/584d7b6c-450e-4739-91c4-6a89c1ab2487/volumes" Sep 29 20:15:05 crc kubenswrapper[4792]: I0929 20:15:05.533454 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:15:05 crc kubenswrapper[4792]: I0929 20:15:05.595023 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l4kkd"] Sep 29 20:15:07 crc kubenswrapper[4792]: I0929 20:15:07.526705 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l4kkd" podUID="537039de-0fde-4bf4-806a-06568653d14d" containerName="registry-server" containerID="cri-o://3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174" gracePeriod=2 Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.035927 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.104478 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ww94\" (UniqueName: \"kubernetes.io/projected/537039de-0fde-4bf4-806a-06568653d14d-kube-api-access-4ww94\") pod \"537039de-0fde-4bf4-806a-06568653d14d\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.104585 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-utilities\") pod \"537039de-0fde-4bf4-806a-06568653d14d\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.104621 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-catalog-content\") pod \"537039de-0fde-4bf4-806a-06568653d14d\" (UID: \"537039de-0fde-4bf4-806a-06568653d14d\") " Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.105332 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-utilities" (OuterVolumeSpecName: "utilities") pod "537039de-0fde-4bf4-806a-06568653d14d" (UID: "537039de-0fde-4bf4-806a-06568653d14d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.110901 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/537039de-0fde-4bf4-806a-06568653d14d-kube-api-access-4ww94" (OuterVolumeSpecName: "kube-api-access-4ww94") pod "537039de-0fde-4bf4-806a-06568653d14d" (UID: "537039de-0fde-4bf4-806a-06568653d14d"). InnerVolumeSpecName "kube-api-access-4ww94". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.207003 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ww94\" (UniqueName: \"kubernetes.io/projected/537039de-0fde-4bf4-806a-06568653d14d-kube-api-access-4ww94\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.207039 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.396413 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "537039de-0fde-4bf4-806a-06568653d14d" (UID: "537039de-0fde-4bf4-806a-06568653d14d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.410575 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/537039de-0fde-4bf4-806a-06568653d14d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.536378 4792 generic.go:334] "Generic (PLEG): container finished" podID="537039de-0fde-4bf4-806a-06568653d14d" containerID="3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174" exitCode=0 Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.536515 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4kkd" event={"ID":"537039de-0fde-4bf4-806a-06568653d14d","Type":"ContainerDied","Data":"3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174"} Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.536714 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4kkd" event={"ID":"537039de-0fde-4bf4-806a-06568653d14d","Type":"ContainerDied","Data":"bbd8732e0a93d1cb6d701ff59823a7ad81aa76cf539527bd29fd88e53aaf24c8"} Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.536736 4792 scope.go:117] "RemoveContainer" containerID="3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.536586 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l4kkd" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.570274 4792 scope.go:117] "RemoveContainer" containerID="337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.580739 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l4kkd"] Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.592168 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l4kkd"] Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.604788 4792 scope.go:117] "RemoveContainer" containerID="f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.663964 4792 scope.go:117] "RemoveContainer" containerID="3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174" Sep 29 20:15:08 crc kubenswrapper[4792]: E0929 20:15:08.664420 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174\": container with ID starting with 3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174 not found: ID does not exist" containerID="3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174" Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.664456 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174"} err="failed to get container status \"3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174\": rpc error: code = NotFound desc = could not find container \"3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174\": container with ID starting with 3ba7187c95371f90b40e63b12c097a6ba2492c6be2f0b92fecdb5f0c9d8b5174 not found: ID does not exist" Sep 29 
Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.664480 4792 scope.go:117] "RemoveContainer" containerID="337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd"
Sep 29 20:15:08 crc kubenswrapper[4792]: E0929 20:15:08.664773 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd\": container with ID starting with 337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd not found: ID does not exist" containerID="337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd"
Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.664802 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd"} err="failed to get container status \"337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd\": rpc error: code = NotFound desc = could not find container \"337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd\": container with ID starting with 337a70f56076ce376cd1ebc53fa869be61737632e78c9299ddb37cbe0e5d78cd not found: ID does not exist"
Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.664821 4792 scope.go:117] "RemoveContainer" containerID="f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516"
Sep 29 20:15:08 crc kubenswrapper[4792]: E0929 20:15:08.665147 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516\": container with ID starting with f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516 not found: ID does not exist" containerID="f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516"
Sep 29 20:15:08 crc kubenswrapper[4792]: I0929 20:15:08.665172 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516"} err="failed to get container status \"f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516\": rpc error: code = NotFound desc = could not find container \"f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516\": container with ID starting with f7879702396a04e82f04a8cd01fefefd5cb694fb1c4cc55520e8dbf9d2895516 not found: ID does not exist"
Sep 29 20:15:09 crc kubenswrapper[4792]: I0929 20:15:09.030185 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="537039de-0fde-4bf4-806a-06568653d14d" path="/var/lib/kubelet/pods/537039de-0fde-4bf4-806a-06568653d14d/volumes"
Sep 29 20:15:11 crc kubenswrapper[4792]: I0929 20:15:11.016018 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:15:11 crc kubenswrapper[4792]: E0929 20:15:11.016520 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:15:26 crc kubenswrapper[4792]: I0929 20:15:26.016430 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:15:26 crc kubenswrapper[4792]: E0929 20:15:26.017506 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:15:39 crc kubenswrapper[4792]: I0929 20:15:39.089216 4792 scope.go:117] "RemoveContainer" containerID="338313961c095e2453e882321216d21854e5164a510a62e4344d0e002e3d24b1"
Sep 29 20:15:41 crc kubenswrapper[4792]: I0929 20:15:41.015630 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:15:41 crc kubenswrapper[4792]: E0929 20:15:41.016091 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:15:53 crc kubenswrapper[4792]: I0929 20:15:53.015584 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:15:53 crc kubenswrapper[4792]: E0929 20:15:53.016493 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:16:02 crc kubenswrapper[4792]: I0929 20:16:02.161587 4792 generic.go:334] "Generic (PLEG): container finished" podID="5bf9f011-e08b-4209-be0e-3706bc53b487" containerID="7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42" exitCode=0
Sep 29 20:16:02 crc kubenswrapper[4792]: I0929 20:16:02.161689 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kndnj/must-gather-27qxh" event={"ID":"5bf9f011-e08b-4209-be0e-3706bc53b487","Type":"ContainerDied","Data":"7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42"}
Sep 29 20:16:02 crc kubenswrapper[4792]: I0929 20:16:02.163876 4792 scope.go:117] "RemoveContainer" containerID="7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42"
Sep 29 20:16:03 crc kubenswrapper[4792]: I0929 20:16:03.184507 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-kndnj_must-gather-27qxh_5bf9f011-e08b-4209-be0e-3706bc53b487/gather/0.log"
Sep 29 20:16:05 crc kubenswrapper[4792]: I0929 20:16:05.015273 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:16:05 crc kubenswrapper[4792]: E0929 20:16:05.016059 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:16:11 crc kubenswrapper[4792]: I0929 20:16:11.742701 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kndnj/must-gather-27qxh"]
Sep 29 20:16:11 crc kubenswrapper[4792]: I0929 20:16:11.743547 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-kndnj/must-gather-27qxh" podUID="5bf9f011-e08b-4209-be0e-3706bc53b487" containerName="copy" containerID="cri-o://602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b" gracePeriod=2
Sep 29 20:16:11 crc kubenswrapper[4792]: I0929 20:16:11.750916 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kndnj/must-gather-27qxh"]
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.190103 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-kndnj_must-gather-27qxh_5bf9f011-e08b-4209-be0e-3706bc53b487/copy/0.log"
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.190950 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kndnj/must-gather-27qxh"
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.252317 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-kndnj_must-gather-27qxh_5bf9f011-e08b-4209-be0e-3706bc53b487/copy/0.log"
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.252998 4792 generic.go:334] "Generic (PLEG): container finished" podID="5bf9f011-e08b-4209-be0e-3706bc53b487" containerID="602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b" exitCode=143
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.253055 4792 scope.go:117] "RemoveContainer" containerID="602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b"
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.253182 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kndnj/must-gather-27qxh"
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.273139 4792 scope.go:117] "RemoveContainer" containerID="7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42"
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.284447 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5bf9f011-e08b-4209-be0e-3706bc53b487-must-gather-output\") pod \"5bf9f011-e08b-4209-be0e-3706bc53b487\" (UID: \"5bf9f011-e08b-4209-be0e-3706bc53b487\") "
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.284620 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4h4h\" (UniqueName: \"kubernetes.io/projected/5bf9f011-e08b-4209-be0e-3706bc53b487-kube-api-access-c4h4h\") pod \"5bf9f011-e08b-4209-be0e-3706bc53b487\" (UID: \"5bf9f011-e08b-4209-be0e-3706bc53b487\") "
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.293219 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bf9f011-e08b-4209-be0e-3706bc53b487-kube-api-access-c4h4h" (OuterVolumeSpecName: "kube-api-access-c4h4h") pod "5bf9f011-e08b-4209-be0e-3706bc53b487" (UID: "5bf9f011-e08b-4209-be0e-3706bc53b487"). InnerVolumeSpecName "kube-api-access-c4h4h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.324480 4792 scope.go:117] "RemoveContainer" containerID="602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b"
Sep 29 20:16:12 crc kubenswrapper[4792]: E0929 20:16:12.331833 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b\": container with ID starting with 602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b not found: ID does not exist" containerID="602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b"
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.331885 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b"} err="failed to get container status \"602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b\": rpc error: code = NotFound desc = could not find container \"602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b\": container with ID starting with 602ecff488bafb301d030f93513c2c22f902109ac38af5a8a476cc30b2659d4b not found: ID does not exist"
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.331913 4792 scope.go:117] "RemoveContainer" containerID="7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42"
Sep 29 20:16:12 crc kubenswrapper[4792]: E0929 20:16:12.332350 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42\": container with ID starting with 7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42 not found: ID does not exist" containerID="7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42"
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.332371 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42"} err="failed to get container status \"7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42\": rpc error: code = NotFound desc = could not find container \"7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42\": container with ID starting with 7dfcbfdd8808317eb05f75d27d1a3637a8200b8f42c538eb22bf7a7da8e24e42 not found: ID does not exist"
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.387236 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4h4h\" (UniqueName: \"kubernetes.io/projected/5bf9f011-e08b-4209-be0e-3706bc53b487-kube-api-access-c4h4h\") on node \"crc\" DevicePath \"\""
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.466288 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bf9f011-e08b-4209-be0e-3706bc53b487-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "5bf9f011-e08b-4209-be0e-3706bc53b487" (UID: "5bf9f011-e08b-4209-be0e-3706bc53b487"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 20:16:12 crc kubenswrapper[4792]: I0929 20:16:12.490293 4792 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5bf9f011-e08b-4209-be0e-3706bc53b487-must-gather-output\") on node \"crc\" DevicePath \"\""
Sep 29 20:16:13 crc kubenswrapper[4792]: I0929 20:16:13.025049 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bf9f011-e08b-4209-be0e-3706bc53b487" path="/var/lib/kubelet/pods/5bf9f011-e08b-4209-be0e-3706bc53b487/volumes"
Sep 29 20:16:19 crc kubenswrapper[4792]: I0929 20:16:19.023362 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:16:19 crc kubenswrapper[4792]: E0929 20:16:19.024029 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:16:31 crc kubenswrapper[4792]: I0929 20:16:31.015707 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:16:31 crc kubenswrapper[4792]: E0929 20:16:31.016330 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:16:39 crc kubenswrapper[4792]: I0929 20:16:39.581324 4792 scope.go:117] "RemoveContainer" containerID="722afaab1a67daa636189f2e5a41250af1a00f693a96dc2b090736d8c97b0a41"
Sep 29 20:16:45 crc kubenswrapper[4792]: I0929 20:16:45.015602 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:16:45 crc kubenswrapper[4792]: E0929 20:16:45.016604 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.309457 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9nm55/must-gather-swl2j"]
Sep 29 20:16:58 crc kubenswrapper[4792]: E0929 20:16:58.310524 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bf9f011-e08b-4209-be0e-3706bc53b487" containerName="gather"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.310544 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bf9f011-e08b-4209-be0e-3706bc53b487" containerName="gather"
Sep 29 20:16:58 crc kubenswrapper[4792]: E0929 20:16:58.310566 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537039de-0fde-4bf4-806a-06568653d14d" containerName="registry-server"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.310574 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="537039de-0fde-4bf4-806a-06568653d14d" containerName="registry-server"
Sep 29 20:16:58 crc kubenswrapper[4792]: E0929 20:16:58.310611 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af974c14-099b-4120-a5d9-7ab25fce6282" containerName="collect-profiles"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.310621 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="af974c14-099b-4120-a5d9-7ab25fce6282" containerName="collect-profiles"
Sep 29 20:16:58 crc kubenswrapper[4792]: E0929 20:16:58.310635 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bf9f011-e08b-4209-be0e-3706bc53b487" containerName="copy"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.310642 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bf9f011-e08b-4209-be0e-3706bc53b487" containerName="copy"
Sep 29 20:16:58 crc kubenswrapper[4792]: E0929 20:16:58.310655 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537039de-0fde-4bf4-806a-06568653d14d" containerName="extract-content"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.310665 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="537039de-0fde-4bf4-806a-06568653d14d" containerName="extract-content"
Sep 29 20:16:58 crc kubenswrapper[4792]: E0929 20:16:58.310682 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537039de-0fde-4bf4-806a-06568653d14d" containerName="extract-utilities"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.310692 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="537039de-0fde-4bf4-806a-06568653d14d" containerName="extract-utilities"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.310934 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bf9f011-e08b-4209-be0e-3706bc53b487" containerName="copy"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.310971 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="af974c14-099b-4120-a5d9-7ab25fce6282" containerName="collect-profiles"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.310982 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="537039de-0fde-4bf4-806a-06568653d14d" containerName="registry-server"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.311016 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bf9f011-e08b-4209-be0e-3706bc53b487" containerName="gather"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.312171 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9nm55/must-gather-swl2j"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.315478 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9nm55"/"kube-root-ca.crt"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.315673 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9nm55"/"openshift-service-ca.crt"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.336894 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9nm55/must-gather-swl2j"]
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.374972 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7227bcb0-4ce5-440a-80ad-e18135a805ac-must-gather-output\") pod \"must-gather-swl2j\" (UID: \"7227bcb0-4ce5-440a-80ad-e18135a805ac\") " pod="openshift-must-gather-9nm55/must-gather-swl2j"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.375288 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f46q4\" (UniqueName: \"kubernetes.io/projected/7227bcb0-4ce5-440a-80ad-e18135a805ac-kube-api-access-f46q4\") pod \"must-gather-swl2j\" (UID: \"7227bcb0-4ce5-440a-80ad-e18135a805ac\") " pod="openshift-must-gather-9nm55/must-gather-swl2j"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.476760 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7227bcb0-4ce5-440a-80ad-e18135a805ac-must-gather-output\") pod \"must-gather-swl2j\" (UID: \"7227bcb0-4ce5-440a-80ad-e18135a805ac\") " pod="openshift-must-gather-9nm55/must-gather-swl2j"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.477198 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7227bcb0-4ce5-440a-80ad-e18135a805ac-must-gather-output\") pod \"must-gather-swl2j\" (UID: \"7227bcb0-4ce5-440a-80ad-e18135a805ac\") " pod="openshift-must-gather-9nm55/must-gather-swl2j"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.477420 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f46q4\" (UniqueName: \"kubernetes.io/projected/7227bcb0-4ce5-440a-80ad-e18135a805ac-kube-api-access-f46q4\") pod \"must-gather-swl2j\" (UID: \"7227bcb0-4ce5-440a-80ad-e18135a805ac\") " pod="openshift-must-gather-9nm55/must-gather-swl2j"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.496564 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f46q4\" (UniqueName: \"kubernetes.io/projected/7227bcb0-4ce5-440a-80ad-e18135a805ac-kube-api-access-f46q4\") pod \"must-gather-swl2j\" (UID: \"7227bcb0-4ce5-440a-80ad-e18135a805ac\") " pod="openshift-must-gather-9nm55/must-gather-swl2j"
Sep 29 20:16:58 crc kubenswrapper[4792]: I0929 20:16:58.633541 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9nm55/must-gather-swl2j"
Sep 29 20:16:59 crc kubenswrapper[4792]: I0929 20:16:59.021503 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:16:59 crc kubenswrapper[4792]: E0929 20:16:59.022077 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:16:59 crc kubenswrapper[4792]: I0929 20:16:59.165784 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9nm55/must-gather-swl2j"]
Sep 29 20:16:59 crc kubenswrapper[4792]: I0929 20:16:59.771677 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/must-gather-swl2j" event={"ID":"7227bcb0-4ce5-440a-80ad-e18135a805ac","Type":"ContainerStarted","Data":"396ff91edd758e2f25ab9f7ae7319b29a728f8f5ff2e934244dbed8467d360ed"}
Sep 29 20:16:59 crc kubenswrapper[4792]: I0929 20:16:59.772012 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/must-gather-swl2j" event={"ID":"7227bcb0-4ce5-440a-80ad-e18135a805ac","Type":"ContainerStarted","Data":"ea9ad986f1669a8869fb09529a31480515f8a6422922df1c7e1f11680090b138"}
Sep 29 20:16:59 crc kubenswrapper[4792]: I0929 20:16:59.772028 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/must-gather-swl2j" event={"ID":"7227bcb0-4ce5-440a-80ad-e18135a805ac","Type":"ContainerStarted","Data":"cb4a684c4d6ad30082dad1e093002382ad79f0c6e403990841302e06719987d8"}
Sep 29 20:16:59 crc kubenswrapper[4792]: I0929 20:16:59.805076 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9nm55/must-gather-swl2j" podStartSLOduration=1.8050573810000001 podStartE2EDuration="1.805057381s" podCreationTimestamp="2025-09-29 20:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 20:16:59.799977268 +0000 UTC m=+4831.793284664" watchObservedRunningTime="2025-09-29 20:16:59.805057381 +0000 UTC m=+4831.798364777"
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.227652 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9nm55/crc-debug-k8qk9"]
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.231194 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-k8qk9"
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.233207 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9nm55"/"default-dockercfg-xj8hg"
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.329903 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7758184-18f7-4a86-9643-9efcbc3a829e-host\") pod \"crc-debug-k8qk9\" (UID: \"d7758184-18f7-4a86-9643-9efcbc3a829e\") " pod="openshift-must-gather-9nm55/crc-debug-k8qk9"
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.330069 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnb7j\" (UniqueName: \"kubernetes.io/projected/d7758184-18f7-4a86-9643-9efcbc3a829e-kube-api-access-tnb7j\") pod \"crc-debug-k8qk9\" (UID: \"d7758184-18f7-4a86-9643-9efcbc3a829e\") " pod="openshift-must-gather-9nm55/crc-debug-k8qk9"
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.431461 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7758184-18f7-4a86-9643-9efcbc3a829e-host\") pod \"crc-debug-k8qk9\" (UID: \"d7758184-18f7-4a86-9643-9efcbc3a829e\") " pod="openshift-must-gather-9nm55/crc-debug-k8qk9"
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.431589 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnb7j\" (UniqueName: \"kubernetes.io/projected/d7758184-18f7-4a86-9643-9efcbc3a829e-kube-api-access-tnb7j\") pod \"crc-debug-k8qk9\" (UID: \"d7758184-18f7-4a86-9643-9efcbc3a829e\") " pod="openshift-must-gather-9nm55/crc-debug-k8qk9"
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.431626 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7758184-18f7-4a86-9643-9efcbc3a829e-host\") pod \"crc-debug-k8qk9\" (UID: \"d7758184-18f7-4a86-9643-9efcbc3a829e\") " pod="openshift-must-gather-9nm55/crc-debug-k8qk9"
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.453065 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnb7j\" (UniqueName: \"kubernetes.io/projected/d7758184-18f7-4a86-9643-9efcbc3a829e-kube-api-access-tnb7j\") pod \"crc-debug-k8qk9\" (UID: \"d7758184-18f7-4a86-9643-9efcbc3a829e\") " pod="openshift-must-gather-9nm55/crc-debug-k8qk9"
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.551799 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-k8qk9"
Sep 29 20:17:03 crc kubenswrapper[4792]: I0929 20:17:03.802698 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/crc-debug-k8qk9" event={"ID":"d7758184-18f7-4a86-9643-9efcbc3a829e","Type":"ContainerStarted","Data":"ba885e4ac91df7470560b1825b95fac5a9bebbe75cbc8e497c71d141b8f981b8"}
Sep 29 20:17:04 crc kubenswrapper[4792]: I0929 20:17:04.815656 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/crc-debug-k8qk9" event={"ID":"d7758184-18f7-4a86-9643-9efcbc3a829e","Type":"ContainerStarted","Data":"57c5d7bc52cb4b6c598309bd8cd2c8764f58fca44416068346e91843b2cc1de7"}
Sep 29 20:17:04 crc kubenswrapper[4792]: I0929 20:17:04.840111 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9nm55/crc-debug-k8qk9" podStartSLOduration=1.840091207 podStartE2EDuration="1.840091207s" podCreationTimestamp="2025-09-29 20:17:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 20:17:04.828052023 +0000 UTC m=+4836.821359429" watchObservedRunningTime="2025-09-29 20:17:04.840091207 +0000 UTC m=+4836.833398623"
Sep 29 20:17:10 crc kubenswrapper[4792]: I0929 20:17:10.015795 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:17:10 crc kubenswrapper[4792]: E0929 20:17:10.016673 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:17:21 crc kubenswrapper[4792]: I0929 20:17:21.015423 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:17:21 crc kubenswrapper[4792]: E0929 20:17:21.016510 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:17:33 crc kubenswrapper[4792]: I0929 20:17:33.015223 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:17:33 crc kubenswrapper[4792]: E0929 20:17:33.015965 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557"
Sep 29 20:17:46 crc kubenswrapper[4792]: I0929 20:17:46.016543 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e"
Sep 29 20:17:47 crc kubenswrapper[4792]: I0929 20:17:47.199915 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"879c3d6f11c2b7321b612ee9d2f622b0df24ff3872f25e237d9ed363b4b76971"}
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.673243 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t8fbp"]
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.675682 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.699675 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t8fbp"]
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.720584 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-utilities\") pod \"redhat-operators-t8fbp\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") " pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.720670 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5nrv\" (UniqueName: \"kubernetes.io/projected/03849d44-e42b-4a12-8667-d7d98590e32f-kube-api-access-k5nrv\") pod \"redhat-operators-t8fbp\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") " pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.720737 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-catalog-content\") pod \"redhat-operators-t8fbp\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") " pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.822939 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-catalog-content\") pod \"redhat-operators-t8fbp\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") " pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.823075 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-utilities\") pod \"redhat-operators-t8fbp\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") " pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.823126 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5nrv\" (UniqueName: \"kubernetes.io/projected/03849d44-e42b-4a12-8667-d7d98590e32f-kube-api-access-k5nrv\") pod \"redhat-operators-t8fbp\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") " pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.823589 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-catalog-content\") pod \"redhat-operators-t8fbp\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") " pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.823600 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-utilities\") pod \"redhat-operators-t8fbp\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") " pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.848359 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5nrv\" (UniqueName: \"kubernetes.io/projected/03849d44-e42b-4a12-8667-d7d98590e32f-kube-api-access-k5nrv\") pod \"redhat-operators-t8fbp\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") " pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.903937 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-65c5d84686-mddqt_9edbf81b-9313-4a4c-8dd0-b29b82f32888/barbican-api/0.log"
Sep 29 20:18:30 crc kubenswrapper[4792]: I0929 20:18:30.988412 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-65c5d84686-mddqt_9edbf81b-9313-4a4c-8dd0-b29b82f32888/barbican-api-log/0.log"
Sep 29 20:18:31 crc kubenswrapper[4792]: I0929 20:18:31.043071 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:31 crc kubenswrapper[4792]: I0929 20:18:31.263205 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6864b589b6-rj9q8_799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb/barbican-keystone-listener/0.log"
Sep 29 20:18:31 crc kubenswrapper[4792]: I0929 20:18:31.583259 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6864b589b6-rj9q8_799e0c59-e4f3-4d1c-8b3e-4771a3d5fecb/barbican-keystone-listener-log/0.log"
Sep 29 20:18:31 crc kubenswrapper[4792]: I0929 20:18:31.776556 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5697845c85-cnq66_c4ee74b8-8ac4-4a34-967b-6fcb220e90fa/barbican-worker/0.log"
Sep 29 20:18:31 crc kubenswrapper[4792]: I0929 20:18:31.969632 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t8fbp"]
Sep 29 20:18:32 crc kubenswrapper[4792]: I0929 20:18:32.048532 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5697845c85-cnq66_c4ee74b8-8ac4-4a34-967b-6fcb220e90fa/barbican-worker-log/0.log"
Sep 29 20:18:32 crc kubenswrapper[4792]: I0929 20:18:32.172364 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-slpnn_3dbdb326-a5bc-4d53-b4cc-6971b8a715e6/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:32 crc kubenswrapper[4792]: I0929 20:18:32.598638 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4/ceilometer-central-agent/0.log"
Sep 29 20:18:32 crc kubenswrapper[4792]: I0929 20:18:32.649923 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8fbp" event={"ID":"03849d44-e42b-4a12-8667-d7d98590e32f","Type":"ContainerStarted","Data":"4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af"}
Sep 29 20:18:32 crc kubenswrapper[4792]: I0929 20:18:32.649964 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8fbp" event={"ID":"03849d44-e42b-4a12-8667-d7d98590e32f","Type":"ContainerStarted","Data":"ff0b0972ebffad53c3a8cd7475371b3c338809dd23f2fcb4128371615af63928"}
Sep 29 20:18:33 crc kubenswrapper[4792]: I0929 20:18:33.083392 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4/ceilometer-notification-agent/0.log"
Sep 29 20:18:33 crc kubenswrapper[4792]: I0929 20:18:33.129618 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4/sg-core/0.log"
Sep 29 20:18:33 crc kubenswrapper[4792]: I0929 20:18:33.135442 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d4da2b2f-cfd1-4d28-96fb-c7c93955a3b4/proxy-httpd/0.log"
Sep 29 20:18:33 crc kubenswrapper[4792]: I0929 20:18:33.375053 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0c9927d9-0800-4bfa-bee9-af02caf9596c/cinder-api-log/0.log"
Sep 29 20:18:33 crc kubenswrapper[4792]: I0929 20:18:33.425630 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0c9927d9-0800-4bfa-bee9-af02caf9596c/cinder-api/0.log"
Sep 29 20:18:33 crc kubenswrapper[4792]: I0929 20:18:33.657118 4792 generic.go:334] "Generic (PLEG): container finished" podID="03849d44-e42b-4a12-8667-d7d98590e32f" containerID="4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af" exitCode=0
Sep 29 20:18:33 crc kubenswrapper[4792]: I0929 20:18:33.657155 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8fbp" event={"ID":"03849d44-e42b-4a12-8667-d7d98590e32f","Type":"ContainerDied","Data":"4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af"}
Sep 29 20:18:33 crc kubenswrapper[4792]: I0929 20:18:33.698591 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_705349a9-36dc-4230-a7ff-e097fc5b66d7/cinder-scheduler/0.log"
Sep 29 20:18:33 crc kubenswrapper[4792]: I0929 20:18:33.730485 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_705349a9-36dc-4230-a7ff-e097fc5b66d7/probe/0.log"
Sep 29 20:18:34 crc kubenswrapper[4792]: I0929 20:18:34.560706 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-6xccv_723e4395-18dd-4729-be31-1c5ccf8e7ec8/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:34 crc kubenswrapper[4792]: I0929 20:18:34.665514 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8fbp" event={"ID":"03849d44-e42b-4a12-8667-d7d98590e32f","Type":"ContainerStarted","Data":"e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4"}
Sep 29 20:18:34 crc kubenswrapper[4792]: I0929 20:18:34.857759 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-v2rp9_6d1053e6-7c5c-4c2c-828d-c9241606b3e1/init/0.log"
Sep 29 20:18:34 crc kubenswrapper[4792]: I0929 20:18:34.931518 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-jfzzt_5d30a56f-01e0-422e-99bd-08328d009094/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:35 crc kubenswrapper[4792]: I0929 20:18:35.053169 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-v2rp9_6d1053e6-7c5c-4c2c-828d-c9241606b3e1/init/0.log"
Sep 29 20:18:35 crc kubenswrapper[4792]: I0929 20:18:35.247346 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-v2rp9_6d1053e6-7c5c-4c2c-828d-c9241606b3e1/dnsmasq-dns/0.log"
Sep 29 20:18:35 crc kubenswrapper[4792]: I0929 20:18:35.343214 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-b8wjj_c90a0d9d-bf42-4d49-9527-e859ffce83a0/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:35 crc kubenswrapper[4792]: I0929 20:18:35.417941 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2/glance-httpd/0.log"
Sep 29 20:18:35 crc kubenswrapper[4792]: I0929 20:18:35.499488 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a0dfd532-bb4f-4a68-b334-e9dcdd0e2fa2/glance-log/0.log"
Sep 29 20:18:35 crc kubenswrapper[4792]: I0929 20:18:35.589683 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_fe46ba4e-dc2a-4960-97d1-fd34116ee7d6/glance-httpd/0.log"
Sep 29 20:18:35 crc kubenswrapper[4792]: I0929 20:18:35.686872 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_fe46ba4e-dc2a-4960-97d1-fd34116ee7d6/glance-log/0.log"
Sep 29 20:18:35 crc kubenswrapper[4792]: I0929 20:18:35.991900 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-8494dffd6-7rx5p_23845288-b122-49f0-b10d-641cfb94b66f/horizon/0.log"
Sep 29 20:18:35 crc kubenswrapper[4792]: I0929 20:18:35.993870 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-8494dffd6-7rx5p_23845288-b122-49f0-b10d-641cfb94b66f/horizon/1.log"
Sep 29 20:18:36 crc kubenswrapper[4792]: I0929 20:18:36.347207 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-gfzmv_6ee2a07b-5943-4517-be5e-e1803f9d8a55/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:36 crc kubenswrapper[4792]: I0929 20:18:36.383118 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-8494dffd6-7rx5p_23845288-b122-49f0-b10d-641cfb94b66f/horizon-log/0.log"
Sep 29 20:18:36 crc kubenswrapper[4792]: I0929 20:18:36.852556 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-ppdnh_af95758e-6a40-4679-ba1c-8ebf988f1865/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:37 crc kubenswrapper[4792]: I0929 20:18:37.215341 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319601-w5m7k_6885051d-f7a5-4076-a670-778fbd8d23ca/keystone-cron/0.log"
Sep 29 20:18:37 crc kubenswrapper[4792]: I0929 20:18:37.306568 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-754c4b8fcb-w2t8n_a4b671bb-328e-401e-933f-665848067860/keystone-api/0.log"
Sep 29 20:18:37 crc kubenswrapper[4792]: I0929 20:18:37.367653 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_e148125d-7567-47d9-a3c3-32bd51ee3c9c/kube-state-metrics/0.log"
Sep 29 20:18:37 crc kubenswrapper[4792]: I0929 20:18:37.668953 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-s77fc_1a5948bb-2b33-40f6-9a12-1b8b4e3071a7/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:38 crc kubenswrapper[4792]: I0929 20:18:38.417175 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-554fb67fd9-fh25j_015e1c15-2d65-42f6-8883-b0be2b5dc0ef/neutron-httpd/0.log"
Sep 29 20:18:38 crc kubenswrapper[4792]: I0929 20:18:38.428150 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-jmkwf_e87aba80-3b0a-409a-8b12-3a8b7c1290d8/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:38 crc kubenswrapper[4792]: E0929 20:18:38.603753 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03849d44_e42b_4a12_8667_d7d98590e32f.slice/crio-conmon-e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4.scope\": RecentStats: unable to find data in memory cache]"
Sep 29 20:18:38 crc kubenswrapper[4792]: I0929 20:18:38.649643 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-554fb67fd9-fh25j_015e1c15-2d65-42f6-8883-b0be2b5dc0ef/neutron-api/0.log"
Sep 29 20:18:38 crc kubenswrapper[4792]: I0929 20:18:38.696699 4792 generic.go:334] "Generic (PLEG): container finished" podID="03849d44-e42b-4a12-8667-d7d98590e32f" containerID="e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4" exitCode=0
Sep 29 20:18:38 crc kubenswrapper[4792]: I0929 20:18:38.696739 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8fbp" event={"ID":"03849d44-e42b-4a12-8667-d7d98590e32f","Type":"ContainerDied","Data":"e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4"}
Sep 29 20:18:39 crc kubenswrapper[4792]: I0929 20:18:39.727438 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8fbp" event={"ID":"03849d44-e42b-4a12-8667-d7d98590e32f","Type":"ContainerStarted","Data":"2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f"}
Sep 29 20:18:39 crc kubenswrapper[4792]: I0929 20:18:39.734436 4792 scope.go:117] "RemoveContainer" containerID="cb681aab73917e6e488fadafc476b2ba5b0a8d4acb994922f68c4071342dd5a0"
Sep 29 20:18:39 crc kubenswrapper[4792]: I0929 20:18:39.747400 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t8fbp" podStartSLOduration=4.06770909 podStartE2EDuration="9.747377605s" podCreationTimestamp="2025-09-29 20:18:30 +0000 UTC" firstStartedPulling="2025-09-29 20:18:33.658921773 +0000 UTC m=+4925.652229169" lastFinishedPulling="2025-09-29 20:18:39.338590298 +0000 UTC m=+4931.331897684" observedRunningTime="2025-09-29 20:18:39.746473161 +0000 UTC m=+4931.739780567" watchObservedRunningTime="2025-09-29 20:18:39.747377605 +0000 UTC m=+4931.740685001"
Sep 29 20:18:39 crc kubenswrapper[4792]: I0929 20:18:39.821339 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_8c04cb97-af56-4b40-b086-990e57b48c15/nova-cell0-conductor-conductor/0.log"
Sep 29 20:18:40 crc kubenswrapper[4792]: I0929 20:18:40.417711 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_70e6c218-833c-460f-a81f-e126902df64b/nova-api-log/0.log"
Sep 29 20:18:40 crc kubenswrapper[4792]: I0929 20:18:40.911066 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_70e6c218-833c-460f-a81f-e126902df64b/nova-api-api/0.log"
Sep 29 20:18:41 crc kubenswrapper[4792]: I0929 20:18:41.043166 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:41 crc kubenswrapper[4792]: I0929 20:18:41.043230 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:18:41 crc kubenswrapper[4792]: I0929 20:18:41.148781 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_454e3f84-9408-4433-9e44-15dde1100854/nova-cell1-conductor-conductor/0.log"
Sep 29 20:18:41 crc kubenswrapper[4792]: I0929 20:18:41.821821 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-ghrcf_477ec7c1-0c72-4b69-9a72-05d465fe26b9/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:41 crc kubenswrapper[4792]: I0929 20:18:41.880599 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_84ede8b3-cba9-4133-bfce-14f44cba07b8/nova-cell1-novncproxy-novncproxy/0.log"
Sep 29 20:18:42 crc kubenswrapper[4792]: I0929 20:18:42.092877 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t8fbp" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" containerName="registry-server" probeResult="failure" output=<
Sep 29 20:18:42 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s
Sep 29 20:18:42 crc kubenswrapper[4792]: >
Sep 29 20:18:42 crc kubenswrapper[4792]: I0929 20:18:42.323763 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_ea8bd43c-bb10-450a-b564-c7b4247d1252/memcached/0.log"
Sep 29 20:18:42 crc kubenswrapper[4792]: I0929 20:18:42.329623 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_de3fc643-e567-4ae6-b446-f861b63822d7/nova-metadata-log/0.log"
Sep 29 20:18:43 crc kubenswrapper[4792]: I0929 20:18:43.037082 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4/mysql-bootstrap/0.log"
Sep 29 20:18:43 crc kubenswrapper[4792]: I0929 20:18:43.397058 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_38e4e8fe-a752-4d8c-aea2-07c6a92a7216/nova-scheduler-scheduler/0.log"
Sep 29 20:18:43 crc kubenswrapper[4792]: I0929 20:18:43.448345 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4/mysql-bootstrap/0.log"
Sep 29 20:18:43 crc kubenswrapper[4792]: I0929 20:18:43.590970 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_de3fc643-e567-4ae6-b446-f861b63822d7/nova-metadata-metadata/0.log"
Sep 29 20:18:43 crc kubenswrapper[4792]: I0929 20:18:43.722463 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9a26454d-9ce8-4591-a7dd-6f8d4df5e3a4/galera/0.log"
Sep 29 20:18:43 crc kubenswrapper[4792]: I0929 20:18:43.850909 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5c8592a0-091a-48ce-996c-f42bbdaf240c/mysql-bootstrap/0.log"
Sep 29 20:18:43 crc kubenswrapper[4792]: I0929 20:18:43.996667 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5c8592a0-091a-48ce-996c-f42bbdaf240c/mysql-bootstrap/0.log"
Sep 29 20:18:44 crc kubenswrapper[4792]: I0929 20:18:44.106273 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5c8592a0-091a-48ce-996c-f42bbdaf240c/galera/0.log"
Sep 29 20:18:44 crc kubenswrapper[4792]: I0929 20:18:44.125406 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_36d9080e-6ba5-4a59-ac59-21f8a868df0d/openstackclient/0.log"
Sep 29 20:18:44 crc kubenswrapper[4792]: I0929 20:18:44.524035 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-jcjdl_51d7ae2f-4cac-4245-b001-91413652f89e/openstack-network-exporter/0.log"
Sep 29 20:18:44 crc kubenswrapper[4792]: I0929 20:18:44.598735 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mh2vn_904d363d-f0dc-4318-9f28-d06e374a4838/ovsdb-server-init/0.log"
Sep 29 20:18:44 crc kubenswrapper[4792]: I0929 20:18:44.831681 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mh2vn_904d363d-f0dc-4318-9f28-d06e374a4838/ovs-vswitchd/0.log"
Sep 29 20:18:44 crc kubenswrapper[4792]: I0929 20:18:44.877870 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mh2vn_904d363d-f0dc-4318-9f28-d06e374a4838/ovsdb-server/0.log"
Sep 29 20:18:44 crc kubenswrapper[4792]: I0929 20:18:44.890213 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mh2vn_904d363d-f0dc-4318-9f28-d06e374a4838/ovsdb-server-init/0.log"
Sep 29 20:18:45 crc kubenswrapper[4792]: I0929 20:18:45.055187 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-zvckm_321cc22b-3e6d-429f-aba5-d69c973d889e/ovn-controller/0.log"
Sep 29 20:18:45 crc kubenswrapper[4792]: I0929 20:18:45.209623 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-vx68g_c694fcd6-bd39-4ec9-9b52-536c53bfff92/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:45 crc kubenswrapper[4792]: I0929 20:18:45.252730 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_934bc291-1ca4-4155-bb99-b3fde7a0d5e5/openstack-network-exporter/0.log"
Sep 29 20:18:45 crc kubenswrapper[4792]: I0929 20:18:45.468559 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_934bc291-1ca4-4155-bb99-b3fde7a0d5e5/ovn-northd/0.log"
Sep 29 20:18:45 crc kubenswrapper[4792]: I0929 20:18:45.474188 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_da980a6f-8dcf-4d5b-a972-fc646865967c/openstack-network-exporter/0.log"
Sep 29 20:18:45 crc kubenswrapper[4792]: I0929 20:18:45.512613 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_da980a6f-8dcf-4d5b-a972-fc646865967c/ovsdbserver-nb/0.log"
Sep 29 20:18:45 crc kubenswrapper[4792]: I0929 20:18:45.711937 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_272430d7-51cd-4f45-bfdd-73ed83ab0bc2/openstack-network-exporter/0.log"
Sep 29 20:18:45 crc kubenswrapper[4792]: I0929 20:18:45.866506 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_272430d7-51cd-4f45-bfdd-73ed83ab0bc2/ovsdbserver-sb/0.log"
Sep 29 20:18:46 crc kubenswrapper[4792]: I0929 20:18:46.025895 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-666f667548-cm9kb_d463ed77-f74f-4724-b942-1f542755d4d4/placement-api/0.log"
Sep 29 20:18:46 crc kubenswrapper[4792]: I0929 20:18:46.155115 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4e364c89-8b07-427c-a59a-c4576f98ddf2/setup-container/0.log"
Sep 29 20:18:46 crc kubenswrapper[4792]: I0929 20:18:46.190876 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-666f667548-cm9kb_d463ed77-f74f-4724-b942-1f542755d4d4/placement-log/0.log"
Sep 29 20:18:46 crc kubenswrapper[4792]: I0929 20:18:46.397554 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4e364c89-8b07-427c-a59a-c4576f98ddf2/setup-container/0.log"
Sep 29 20:18:46 crc kubenswrapper[4792]: I0929 20:18:46.476252 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4e364c89-8b07-427c-a59a-c4576f98ddf2/rabbitmq/0.log"
Sep 29 20:18:46 crc kubenswrapper[4792]: I0929 20:18:46.640071 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_dd9e8433-9eac-49a2-bacd-7acb220b0efd/setup-container/0.log"
Sep 29 20:18:46 crc kubenswrapper[4792]: I0929 20:18:46.823673 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_dd9e8433-9eac-49a2-bacd-7acb220b0efd/rabbitmq/0.log"
Sep 29 20:18:46 crc kubenswrapper[4792]: I0929 20:18:46.846254 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_dd9e8433-9eac-49a2-bacd-7acb220b0efd/setup-container/0.log"
Sep 29 20:18:46 crc kubenswrapper[4792]: I0929 20:18:46.906300 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-25chb_3c2d29d5-9c65-4cb4-b66d-aeffaff2201f/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:47 crc kubenswrapper[4792]: I0929 20:18:47.098383 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-867d4_a44c4b7c-994b-4f5f-8b00-ca9da0a744f4/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:47 crc kubenswrapper[4792]: I0929 20:18:47.161520 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-x2x5c_15519058-5c31-4b09-b9e8-68129ad2f41e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:47 crc kubenswrapper[4792]: I0929 20:18:47.329791 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-89vq8_c9424cca-92f5-490d-9a25-5feaa7010200/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:47 crc kubenswrapper[4792]: I0929 20:18:47.504376 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-5w5vz_352317f9-484f-4680-aea0-8ebf9c6b4e44/ssh-known-hosts-edpm-deployment/0.log"
Sep 29 20:18:47 crc kubenswrapper[4792]: I0929 20:18:47.685405 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5dd4fd546c-9hwf9_8d8f74d0-be39-457e-ad50-c21d43cc942e/proxy-httpd/0.log"
Sep 29 20:18:47 crc kubenswrapper[4792]: I0929 20:18:47.809478 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5dd4fd546c-9hwf9_8d8f74d0-be39-457e-ad50-c21d43cc942e/proxy-server/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.169622 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-scl6k_654442d0-5361-4c10-b60a-2eb3bcf71acd/swift-ring-rebalance/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.192077 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/account-auditor/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.266913 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/account-reaper/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.385164 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/account-server/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.462939 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/account-replicator/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.489345 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/container-auditor/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.575646 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/container-replicator/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.656669 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/container-server/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.703539 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/container-updater/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.813038 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/object-auditor/0.log"
Sep 29 20:18:48 crc kubenswrapper[4792]: I0929 20:18:48.842603 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/object-expirer/0.log"
Sep 29 20:18:49 crc kubenswrapper[4792]: I0929 20:18:49.548562 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/rsync/0.log"
Sep 29 20:18:49 crc kubenswrapper[4792]: I0929 20:18:49.570511 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/object-updater/0.log"
Sep 29 20:18:49 crc kubenswrapper[4792]: I0929 20:18:49.619444 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/object-replicator/0.log"
Sep 29 20:18:49 crc kubenswrapper[4792]: I0929 20:18:49.663109 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/object-server/0.log"
Sep 29 20:18:49 crc kubenswrapper[4792]: I0929 20:18:49.760296 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_cd33a904-c32b-4781-b3fe-53d903764497/swift-recon-cron/0.log"
Sep 29 20:18:49 crc kubenswrapper[4792]: I0929 20:18:49.963223 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-hfvsz_62dafb72-d440-48ec-af0e-46ee7e16ab5a/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:50 crc kubenswrapper[4792]: I0929 20:18:50.042481 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_49e8a61d-e4e3-4510-b209-7d6fb5b02e2b/tempest-tests-tempest-tests-runner/0.log"
Sep 29 20:18:50 crc kubenswrapper[4792]: I0929 20:18:50.149356 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_33fd9eda-fc31-456e-8408-b9483c1fef79/test-operator-logs-container/0.log"
Sep 29 20:18:50 crc kubenswrapper[4792]: I0929 20:18:50.338866 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-frsdd_6040c28a-468b-4253-8a8f-8fc98326b48b/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:18:52 crc kubenswrapper[4792]: I0929 20:18:52.086897 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t8fbp" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" containerName="registry-server" probeResult="failure" output=<
Sep 29 20:18:52 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s
Sep 29 20:18:52 crc kubenswrapper[4792]: >
Sep 29 20:19:01 crc kubenswrapper[4792]: I0929 20:19:01.106693 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:19:01 crc kubenswrapper[4792]: I0929 20:19:01.158553 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:19:01 crc kubenswrapper[4792]: I0929 20:19:01.857793 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t8fbp"]
Sep 29 20:19:02 crc kubenswrapper[4792]: I0929 20:19:02.948835 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t8fbp" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" containerName="registry-server" containerID="cri-o://2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f" gracePeriod=2
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.474911 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.621171 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-catalog-content\") pod \"03849d44-e42b-4a12-8667-d7d98590e32f\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") "
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.621371 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-utilities\") pod \"03849d44-e42b-4a12-8667-d7d98590e32f\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") "
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.621439 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5nrv\" (UniqueName: \"kubernetes.io/projected/03849d44-e42b-4a12-8667-d7d98590e32f-kube-api-access-k5nrv\") pod \"03849d44-e42b-4a12-8667-d7d98590e32f\" (UID: \"03849d44-e42b-4a12-8667-d7d98590e32f\") "
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.621985 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-utilities" (OuterVolumeSpecName: "utilities") pod "03849d44-e42b-4a12-8667-d7d98590e32f" (UID: "03849d44-e42b-4a12-8667-d7d98590e32f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.631972 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03849d44-e42b-4a12-8667-d7d98590e32f-kube-api-access-k5nrv" (OuterVolumeSpecName: "kube-api-access-k5nrv") pod "03849d44-e42b-4a12-8667-d7d98590e32f" (UID: "03849d44-e42b-4a12-8667-d7d98590e32f"). InnerVolumeSpecName "kube-api-access-k5nrv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.706073 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03849d44-e42b-4a12-8667-d7d98590e32f" (UID: "03849d44-e42b-4a12-8667-d7d98590e32f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.723773 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.724015 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03849d44-e42b-4a12-8667-d7d98590e32f-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.724081 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5nrv\" (UniqueName: \"kubernetes.io/projected/03849d44-e42b-4a12-8667-d7d98590e32f-kube-api-access-k5nrv\") on node \"crc\" DevicePath \"\""
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.959981 4792 generic.go:334] "Generic (PLEG): container finished" podID="03849d44-e42b-4a12-8667-d7d98590e32f" containerID="2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f" exitCode=0
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.960024 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8fbp"
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.960024 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8fbp" event={"ID":"03849d44-e42b-4a12-8667-d7d98590e32f","Type":"ContainerDied","Data":"2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f"}
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.960223 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8fbp" event={"ID":"03849d44-e42b-4a12-8667-d7d98590e32f","Type":"ContainerDied","Data":"ff0b0972ebffad53c3a8cd7475371b3c338809dd23f2fcb4128371615af63928"}
Sep 29 20:19:03 crc kubenswrapper[4792]: I0929 20:19:03.960241 4792 scope.go:117] "RemoveContainer" containerID="2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f"
Sep 29 20:19:04 crc kubenswrapper[4792]: I0929 20:19:04.000205 4792 scope.go:117] "RemoveContainer" containerID="e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4"
Sep 29 20:19:04 crc kubenswrapper[4792]: I0929 20:19:04.003961 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t8fbp"]
Sep 29 20:19:04 crc kubenswrapper[4792]: I0929 20:19:04.014640 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t8fbp"]
Sep 29 20:19:04 crc kubenswrapper[4792]: I0929 20:19:04.025819 4792 scope.go:117] "RemoveContainer" containerID="4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af"
Sep 29 20:19:04 crc kubenswrapper[4792]: I0929 20:19:04.062964 4792 scope.go:117] "RemoveContainer" containerID="2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f"
Sep 29 20:19:04 crc kubenswrapper[4792]: E0929 20:19:04.063356 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f\": container with ID starting with 2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f not found: ID does not exist" containerID="2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f"
Sep 29 20:19:04 crc kubenswrapper[4792]: I0929 20:19:04.063451 4792 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f"} err="failed to get container status \"2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f\": rpc error: code = NotFound desc = could not find container \"2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f\": container with ID starting with 2a6bff9f333be0a2cf8acfb42968d8d056c6429dbedccdbff928162bff73ff8f not found: ID does not exist" Sep 29 20:19:04 crc kubenswrapper[4792]: I0929 20:19:04.063543 4792 scope.go:117] "RemoveContainer" containerID="e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4" Sep 29 20:19:04 crc kubenswrapper[4792]: E0929 20:19:04.063893 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4\": container with ID starting with e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4 not found: ID does not exist" containerID="e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4" Sep 29 20:19:04 crc kubenswrapper[4792]: I0929 20:19:04.063943 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4"} err="failed to get container status \"e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4\": rpc error: code = NotFound desc = could not find container \"e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4\": container with ID starting with e113bdf7b769e0e7f41bd661bd3584c23dd619bed98dbbfc169495ade23896e4 not found: ID does not exist" Sep 29 20:19:04 crc kubenswrapper[4792]: I0929 20:19:04.063965 4792 scope.go:117] "RemoveContainer" containerID="4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af" Sep 29 20:19:04 crc kubenswrapper[4792]: E0929 20:19:04.064180 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af\": container with ID starting with 4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af not found: ID does not exist" containerID="4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af" Sep 29 20:19:04 crc kubenswrapper[4792]: I0929 20:19:04.064204 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af"} err="failed to get container status \"4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af\": rpc error: code = NotFound desc = could not find container \"4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af\": container with ID starting with 4bc0cb92723b2a2566a27ffbca551d7de705d59c55237fb490c91965e0e7d6af not found: ID does not exist" Sep 29 20:19:05 crc kubenswrapper[4792]: I0929 20:19:05.025596 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" path="/var/lib/kubelet/pods/03849d44-e42b-4a12-8667-d7d98590e32f/volumes" Sep 29 20:19:07 crc kubenswrapper[4792]: I0929 20:19:07.996957 4792 generic.go:334] "Generic (PLEG): container finished" podID="d7758184-18f7-4a86-9643-9efcbc3a829e" containerID="57c5d7bc52cb4b6c598309bd8cd2c8764f58fca44416068346e91843b2cc1de7" exitCode=0 Sep 29 20:19:07 crc kubenswrapper[4792]: I0929 
20:19:07.997036 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/crc-debug-k8qk9" event={"ID":"d7758184-18f7-4a86-9643-9efcbc3a829e","Type":"ContainerDied","Data":"57c5d7bc52cb4b6c598309bd8cd2c8764f58fca44416068346e91843b2cc1de7"} Sep 29 20:19:09 crc kubenswrapper[4792]: I0929 20:19:09.107759 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-k8qk9" Sep 29 20:19:09 crc kubenswrapper[4792]: I0929 20:19:09.136312 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9nm55/crc-debug-k8qk9"] Sep 29 20:19:09 crc kubenswrapper[4792]: I0929 20:19:09.145898 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9nm55/crc-debug-k8qk9"] Sep 29 20:19:09 crc kubenswrapper[4792]: I0929 20:19:09.226137 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7758184-18f7-4a86-9643-9efcbc3a829e-host\") pod \"d7758184-18f7-4a86-9643-9efcbc3a829e\" (UID: \"d7758184-18f7-4a86-9643-9efcbc3a829e\") " Sep 29 20:19:09 crc kubenswrapper[4792]: I0929 20:19:09.226498 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnb7j\" (UniqueName: \"kubernetes.io/projected/d7758184-18f7-4a86-9643-9efcbc3a829e-kube-api-access-tnb7j\") pod \"d7758184-18f7-4a86-9643-9efcbc3a829e\" (UID: \"d7758184-18f7-4a86-9643-9efcbc3a829e\") " Sep 29 20:19:09 crc kubenswrapper[4792]: I0929 20:19:09.239184 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d7758184-18f7-4a86-9643-9efcbc3a829e-host" (OuterVolumeSpecName: "host") pod "d7758184-18f7-4a86-9643-9efcbc3a829e" (UID: "d7758184-18f7-4a86-9643-9efcbc3a829e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 20:19:09 crc kubenswrapper[4792]: I0929 20:19:09.254904 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7758184-18f7-4a86-9643-9efcbc3a829e-kube-api-access-tnb7j" (OuterVolumeSpecName: "kube-api-access-tnb7j") pod "d7758184-18f7-4a86-9643-9efcbc3a829e" (UID: "d7758184-18f7-4a86-9643-9efcbc3a829e"). InnerVolumeSpecName "kube-api-access-tnb7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:19:09 crc kubenswrapper[4792]: I0929 20:19:09.329004 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnb7j\" (UniqueName: \"kubernetes.io/projected/d7758184-18f7-4a86-9643-9efcbc3a829e-kube-api-access-tnb7j\") on node \"crc\" DevicePath \"\"" Sep 29 20:19:09 crc kubenswrapper[4792]: I0929 20:19:09.329035 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7758184-18f7-4a86-9643-9efcbc3a829e-host\") on node \"crc\" DevicePath \"\"" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.012537 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba885e4ac91df7470560b1825b95fac5a9bebbe75cbc8e497c71d141b8f981b8" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.012593 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-k8qk9" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.341647 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9nm55/crc-debug-mzlfw"] Sep 29 20:19:10 crc kubenswrapper[4792]: E0929 20:19:10.342007 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" containerName="extract-utilities" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.342018 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" containerName="extract-utilities" Sep 29 20:19:10 crc kubenswrapper[4792]: E0929 20:19:10.342037 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7758184-18f7-4a86-9643-9efcbc3a829e" containerName="container-00" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.342044 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7758184-18f7-4a86-9643-9efcbc3a829e" containerName="container-00" Sep 29 20:19:10 crc kubenswrapper[4792]: E0929 20:19:10.342058 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" containerName="extract-content" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.342063 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" containerName="extract-content" Sep 29 20:19:10 crc kubenswrapper[4792]: E0929 20:19:10.342095 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" containerName="registry-server" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.342102 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" containerName="registry-server" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.342267 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="03849d44-e42b-4a12-8667-d7d98590e32f" containerName="registry-server" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.342301 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7758184-18f7-4a86-9643-9efcbc3a829e" containerName="container-00" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.342819 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-mzlfw" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.344533 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9nm55"/"default-dockercfg-xj8hg" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.449155 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dc46a911-3298-4aa9-989b-f1688c3eb719-host\") pod \"crc-debug-mzlfw\" (UID: \"dc46a911-3298-4aa9-989b-f1688c3eb719\") " pod="openshift-must-gather-9nm55/crc-debug-mzlfw" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.449253 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k857s\" (UniqueName: \"kubernetes.io/projected/dc46a911-3298-4aa9-989b-f1688c3eb719-kube-api-access-k857s\") pod \"crc-debug-mzlfw\" (UID: \"dc46a911-3298-4aa9-989b-f1688c3eb719\") " pod="openshift-must-gather-9nm55/crc-debug-mzlfw" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.552158 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dc46a911-3298-4aa9-989b-f1688c3eb719-host\") pod \"crc-debug-mzlfw\" (UID: \"dc46a911-3298-4aa9-989b-f1688c3eb719\") " pod="openshift-must-gather-9nm55/crc-debug-mzlfw" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.552258 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k857s\" (UniqueName: \"kubernetes.io/projected/dc46a911-3298-4aa9-989b-f1688c3eb719-kube-api-access-k857s\") pod \"crc-debug-mzlfw\" (UID: \"dc46a911-3298-4aa9-989b-f1688c3eb719\") " pod="openshift-must-gather-9nm55/crc-debug-mzlfw" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.552671 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dc46a911-3298-4aa9-989b-f1688c3eb719-host\") pod \"crc-debug-mzlfw\" (UID: \"dc46a911-3298-4aa9-989b-f1688c3eb719\") " pod="openshift-must-gather-9nm55/crc-debug-mzlfw" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.569360 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k857s\" (UniqueName: \"kubernetes.io/projected/dc46a911-3298-4aa9-989b-f1688c3eb719-kube-api-access-k857s\") pod \"crc-debug-mzlfw\" (UID: \"dc46a911-3298-4aa9-989b-f1688c3eb719\") " pod="openshift-must-gather-9nm55/crc-debug-mzlfw" Sep 29 20:19:10 crc kubenswrapper[4792]: I0929 20:19:10.655633 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-mzlfw" Sep 29 20:19:11 crc kubenswrapper[4792]: I0929 20:19:11.025353 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7758184-18f7-4a86-9643-9efcbc3a829e" path="/var/lib/kubelet/pods/d7758184-18f7-4a86-9643-9efcbc3a829e/volumes" Sep 29 20:19:11 crc kubenswrapper[4792]: I0929 20:19:11.026091 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/crc-debug-mzlfw" event={"ID":"dc46a911-3298-4aa9-989b-f1688c3eb719","Type":"ContainerStarted","Data":"4a01fb10c9b891357a40718ec8f274e31f40fc06c8e50f14342756c03ade4018"} Sep 29 20:19:11 crc kubenswrapper[4792]: I0929 20:19:11.026122 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/crc-debug-mzlfw" event={"ID":"dc46a911-3298-4aa9-989b-f1688c3eb719","Type":"ContainerStarted","Data":"3bf6736ae362de21d6b341bdf13737a82d94d27250ac3ada58bb00cb26b81c6d"} Sep 29 20:19:11 crc kubenswrapper[4792]: I0929 20:19:11.041643 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9nm55/crc-debug-mzlfw" podStartSLOduration=1.04162236 podStartE2EDuration="1.04162236s" podCreationTimestamp="2025-09-29 20:19:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 20:19:11.037509172 +0000 UTC m=+4963.030816568" watchObservedRunningTime="2025-09-29 20:19:11.04162236 +0000 UTC m=+4963.034929756" Sep 29 20:19:12 crc kubenswrapper[4792]: I0929 20:19:12.033710 4792 generic.go:334] "Generic (PLEG): container finished" podID="dc46a911-3298-4aa9-989b-f1688c3eb719" containerID="4a01fb10c9b891357a40718ec8f274e31f40fc06c8e50f14342756c03ade4018" exitCode=0 Sep 29 20:19:12 crc kubenswrapper[4792]: I0929 20:19:12.034007 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/crc-debug-mzlfw" event={"ID":"dc46a911-3298-4aa9-989b-f1688c3eb719","Type":"ContainerDied","Data":"4a01fb10c9b891357a40718ec8f274e31f40fc06c8e50f14342756c03ade4018"} Sep 29 20:19:13 crc kubenswrapper[4792]: I0929 20:19:13.140572 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-mzlfw" Sep 29 20:19:13 crc kubenswrapper[4792]: I0929 20:19:13.203671 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k857s\" (UniqueName: \"kubernetes.io/projected/dc46a911-3298-4aa9-989b-f1688c3eb719-kube-api-access-k857s\") pod \"dc46a911-3298-4aa9-989b-f1688c3eb719\" (UID: \"dc46a911-3298-4aa9-989b-f1688c3eb719\") " Sep 29 20:19:13 crc kubenswrapper[4792]: I0929 20:19:13.203769 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dc46a911-3298-4aa9-989b-f1688c3eb719-host\") pod \"dc46a911-3298-4aa9-989b-f1688c3eb719\" (UID: \"dc46a911-3298-4aa9-989b-f1688c3eb719\") " Sep 29 20:19:13 crc kubenswrapper[4792]: I0929 20:19:13.203869 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dc46a911-3298-4aa9-989b-f1688c3eb719-host" (OuterVolumeSpecName: "host") pod "dc46a911-3298-4aa9-989b-f1688c3eb719" (UID: "dc46a911-3298-4aa9-989b-f1688c3eb719"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 20:19:13 crc kubenswrapper[4792]: I0929 20:19:13.204481 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dc46a911-3298-4aa9-989b-f1688c3eb719-host\") on node \"crc\" DevicePath \"\"" Sep 29 20:19:13 crc kubenswrapper[4792]: I0929 20:19:13.222448 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc46a911-3298-4aa9-989b-f1688c3eb719-kube-api-access-k857s" (OuterVolumeSpecName: "kube-api-access-k857s") pod "dc46a911-3298-4aa9-989b-f1688c3eb719" (UID: "dc46a911-3298-4aa9-989b-f1688c3eb719"). InnerVolumeSpecName "kube-api-access-k857s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:19:13 crc kubenswrapper[4792]: I0929 20:19:13.305510 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k857s\" (UniqueName: \"kubernetes.io/projected/dc46a911-3298-4aa9-989b-f1688c3eb719-kube-api-access-k857s\") on node \"crc\" DevicePath \"\"" Sep 29 20:19:14 crc kubenswrapper[4792]: I0929 20:19:14.055262 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-mzlfw" Sep 29 20:19:14 crc kubenswrapper[4792]: I0929 20:19:14.055623 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/crc-debug-mzlfw" event={"ID":"dc46a911-3298-4aa9-989b-f1688c3eb719","Type":"ContainerDied","Data":"3bf6736ae362de21d6b341bdf13737a82d94d27250ac3ada58bb00cb26b81c6d"} Sep 29 20:19:14 crc kubenswrapper[4792]: I0929 20:19:14.055731 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bf6736ae362de21d6b341bdf13737a82d94d27250ac3ada58bb00cb26b81c6d" Sep 29 20:19:17 crc kubenswrapper[4792]: I0929 20:19:17.852656 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9nm55/crc-debug-mzlfw"] Sep 29 20:19:17 crc kubenswrapper[4792]: I0929 20:19:17.859198 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9nm55/crc-debug-mzlfw"] Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.025493 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc46a911-3298-4aa9-989b-f1688c3eb719" path="/var/lib/kubelet/pods/dc46a911-3298-4aa9-989b-f1688c3eb719/volumes" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.026207 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9nm55/crc-debug-vt8g5"] Sep 29 20:19:19 crc kubenswrapper[4792]: E0929 20:19:19.026519 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc46a911-3298-4aa9-989b-f1688c3eb719" containerName="container-00" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.026535 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc46a911-3298-4aa9-989b-f1688c3eb719" containerName="container-00" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.026744 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc46a911-3298-4aa9-989b-f1688c3eb719" containerName="container-00" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.027376 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-vt8g5" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.029820 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9nm55"/"default-dockercfg-xj8hg" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.111118 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzrcx\" (UniqueName: \"kubernetes.io/projected/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-kube-api-access-vzrcx\") pod \"crc-debug-vt8g5\" (UID: \"383b2cfa-8a7f-4a6c-a2a2-b6732603d780\") " pod="openshift-must-gather-9nm55/crc-debug-vt8g5" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.112065 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-host\") pod \"crc-debug-vt8g5\" (UID: \"383b2cfa-8a7f-4a6c-a2a2-b6732603d780\") " pod="openshift-must-gather-9nm55/crc-debug-vt8g5" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.214255 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzrcx\" (UniqueName: \"kubernetes.io/projected/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-kube-api-access-vzrcx\") pod \"crc-debug-vt8g5\" (UID: \"383b2cfa-8a7f-4a6c-a2a2-b6732603d780\") " pod="openshift-must-gather-9nm55/crc-debug-vt8g5" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.214408 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-host\") pod \"crc-debug-vt8g5\" (UID: \"383b2cfa-8a7f-4a6c-a2a2-b6732603d780\") " pod="openshift-must-gather-9nm55/crc-debug-vt8g5" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.214527 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-host\") pod \"crc-debug-vt8g5\" (UID: \"383b2cfa-8a7f-4a6c-a2a2-b6732603d780\") " pod="openshift-must-gather-9nm55/crc-debug-vt8g5" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.243758 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzrcx\" (UniqueName: \"kubernetes.io/projected/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-kube-api-access-vzrcx\") pod \"crc-debug-vt8g5\" (UID: \"383b2cfa-8a7f-4a6c-a2a2-b6732603d780\") " pod="openshift-must-gather-9nm55/crc-debug-vt8g5" Sep 29 20:19:19 crc kubenswrapper[4792]: I0929 20:19:19.345034 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-vt8g5" Sep 29 20:19:20 crc kubenswrapper[4792]: I0929 20:19:20.107952 4792 generic.go:334] "Generic (PLEG): container finished" podID="383b2cfa-8a7f-4a6c-a2a2-b6732603d780" containerID="4eff5ca2098c0fe43e97b3577a6fd67a7b1d87c310bb1ee32126036d309618e3" exitCode=0 Sep 29 20:19:20 crc kubenswrapper[4792]: I0929 20:19:20.108009 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/crc-debug-vt8g5" event={"ID":"383b2cfa-8a7f-4a6c-a2a2-b6732603d780","Type":"ContainerDied","Data":"4eff5ca2098c0fe43e97b3577a6fd67a7b1d87c310bb1ee32126036d309618e3"} Sep 29 20:19:20 crc kubenswrapper[4792]: I0929 20:19:20.108565 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/crc-debug-vt8g5" event={"ID":"383b2cfa-8a7f-4a6c-a2a2-b6732603d780","Type":"ContainerStarted","Data":"68172dafba3a763b014a2c06be77d2401dcf5c0d8cb4a95b9af539043a0fa645"} Sep 29 20:19:20 crc kubenswrapper[4792]: I0929 20:19:20.146273 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9nm55/crc-debug-vt8g5"] Sep 29 20:19:20 crc kubenswrapper[4792]: I0929 20:19:20.154517 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9nm55/crc-debug-vt8g5"] Sep 29 20:19:21 crc kubenswrapper[4792]: I0929 20:19:21.235169 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-vt8g5" Sep 29 20:19:21 crc kubenswrapper[4792]: I0929 20:19:21.370199 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzrcx\" (UniqueName: \"kubernetes.io/projected/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-kube-api-access-vzrcx\") pod \"383b2cfa-8a7f-4a6c-a2a2-b6732603d780\" (UID: \"383b2cfa-8a7f-4a6c-a2a2-b6732603d780\") " Sep 29 20:19:21 crc kubenswrapper[4792]: I0929 20:19:21.370403 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-host\") pod \"383b2cfa-8a7f-4a6c-a2a2-b6732603d780\" (UID: \"383b2cfa-8a7f-4a6c-a2a2-b6732603d780\") " Sep 29 20:19:21 crc kubenswrapper[4792]: I0929 20:19:21.370891 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-host" (OuterVolumeSpecName: "host") pod "383b2cfa-8a7f-4a6c-a2a2-b6732603d780" (UID: "383b2cfa-8a7f-4a6c-a2a2-b6732603d780"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 20:19:21 crc kubenswrapper[4792]: I0929 20:19:21.376292 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-kube-api-access-vzrcx" (OuterVolumeSpecName: "kube-api-access-vzrcx") pod "383b2cfa-8a7f-4a6c-a2a2-b6732603d780" (UID: "383b2cfa-8a7f-4a6c-a2a2-b6732603d780"). InnerVolumeSpecName "kube-api-access-vzrcx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:19:21 crc kubenswrapper[4792]: I0929 20:19:21.472720 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-host\") on node \"crc\" DevicePath \"\"" Sep 29 20:19:21 crc kubenswrapper[4792]: I0929 20:19:21.472745 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzrcx\" (UniqueName: \"kubernetes.io/projected/383b2cfa-8a7f-4a6c-a2a2-b6732603d780-kube-api-access-vzrcx\") on node \"crc\" DevicePath \"\"" Sep 29 20:19:22 crc kubenswrapper[4792]: I0929 20:19:22.128923 4792 scope.go:117] "RemoveContainer" containerID="4eff5ca2098c0fe43e97b3577a6fd67a7b1d87c310bb1ee32126036d309618e3" Sep 29 20:19:22 crc kubenswrapper[4792]: I0929 20:19:22.129169 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9nm55/crc-debug-vt8g5" Sep 29 20:19:23 crc kubenswrapper[4792]: I0929 20:19:23.025592 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="383b2cfa-8a7f-4a6c-a2a2-b6732603d780" path="/var/lib/kubelet/pods/383b2cfa-8a7f-4a6c-a2a2-b6732603d780/volumes" Sep 29 20:19:23 crc kubenswrapper[4792]: I0929 20:19:23.818353 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/util/0.log" Sep 29 20:19:24 crc kubenswrapper[4792]: I0929 20:19:24.055505 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/pull/0.log" Sep 29 20:19:24 crc kubenswrapper[4792]: I0929 20:19:24.057682 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/util/0.log" Sep 29 20:19:24 crc kubenswrapper[4792]: I0929 20:19:24.088543 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/pull/0.log" Sep 29 20:19:24 crc kubenswrapper[4792]: I0929 20:19:24.246518 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/pull/0.log" Sep 29 20:19:24 crc kubenswrapper[4792]: I0929 20:19:24.246802 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/extract/0.log" Sep 29 20:19:24 crc kubenswrapper[4792]: I0929 20:19:24.262425 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2a375d0d1c57dc6ca715bf6094605ae633b88b22c8703fb4672d18d3ca9jqrr_0c02843e-3b00-4af6-8f78-d46dc77f427d/util/0.log" Sep 29 20:19:24 crc kubenswrapper[4792]: I0929 20:19:24.438604 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-c6z65_ee957b59-f5b6-4306-b6a7-4550199fe910/kube-rbac-proxy/0.log" Sep 29 20:19:24 crc kubenswrapper[4792]: I0929 20:19:24.485326 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-c6z65_ee957b59-f5b6-4306-b6a7-4550199fe910/manager/0.log" Sep 29 
20:19:24 crc kubenswrapper[4792]: I0929 20:19:24.953878 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-vjkgm_7f29d397-4b2d-4668-91f6-744e22070f30/kube-rbac-proxy/0.log" Sep 29 20:19:25 crc kubenswrapper[4792]: I0929 20:19:25.083133 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-vjkgm_7f29d397-4b2d-4668-91f6-744e22070f30/manager/0.log" Sep 29 20:19:25 crc kubenswrapper[4792]: I0929 20:19:25.104819 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-h9csw_aa102219-aaa4-46c5-b783-519972688523/kube-rbac-proxy/0.log" Sep 29 20:19:25 crc kubenswrapper[4792]: I0929 20:19:25.209658 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-h9csw_aa102219-aaa4-46c5-b783-519972688523/manager/0.log" Sep 29 20:19:25 crc kubenswrapper[4792]: I0929 20:19:25.363694 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-j2crr_bd8fdc17-d2f2-4644-8789-c8188f91ce61/kube-rbac-proxy/0.log" Sep 29 20:19:25 crc kubenswrapper[4792]: I0929 20:19:25.369935 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-j2crr_bd8fdc17-d2f2-4644-8789-c8188f91ce61/manager/0.log" Sep 29 20:19:25 crc kubenswrapper[4792]: I0929 20:19:25.477516 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-fcg79_1c191b6e-d1aa-4576-98da-db7178aed835/kube-rbac-proxy/0.log" Sep 29 20:19:25 crc kubenswrapper[4792]: I0929 20:19:25.612396 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-fcg79_1c191b6e-d1aa-4576-98da-db7178aed835/manager/0.log" Sep 29 20:19:25 crc kubenswrapper[4792]: I0929 20:19:25.651822 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-d48vc_1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e/kube-rbac-proxy/0.log" Sep 29 20:19:25 crc kubenswrapper[4792]: I0929 20:19:25.702581 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-d48vc_1d9cd325-38fc-4c7c-bd2f-51b86aa23d2e/manager/0.log" Sep 29 20:19:26 crc kubenswrapper[4792]: I0929 20:19:26.502032 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-fn8fk_ca7e36bc-4aa5-414f-92a4-db59399217b9/kube-rbac-proxy/0.log" Sep 29 20:19:26 crc kubenswrapper[4792]: I0929 20:19:26.609376 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-fn8fk_ca7e36bc-4aa5-414f-92a4-db59399217b9/manager/0.log" Sep 29 20:19:26 crc kubenswrapper[4792]: I0929 20:19:26.692081 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-nfk4r_fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0/manager/0.log" Sep 29 20:19:26 crc kubenswrapper[4792]: I0929 20:19:26.704098 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-nfk4r_fdf51eb0-6b5f-43ad-ba01-c8ff12508dc0/kube-rbac-proxy/0.log" Sep 29 20:19:26 crc kubenswrapper[4792]: I0929 20:19:26.803750 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-swj98_b9148442-b4dc-4926-920d-33c9a00172fa/kube-rbac-proxy/0.log" Sep 29 20:19:26 crc kubenswrapper[4792]: I0929 20:19:26.975241 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-zzmf2_49160b59-f488-40f9-b23d-a3bccc3c2cb9/kube-rbac-proxy/0.log" Sep 29 20:19:26 crc kubenswrapper[4792]: I0929 20:19:26.995253 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-swj98_b9148442-b4dc-4926-920d-33c9a00172fa/manager/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.020521 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-zzmf2_49160b59-f488-40f9-b23d-a3bccc3c2cb9/manager/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.186722 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-t4srt_af042430-9b25-44c8-8f30-19db90025d05/kube-rbac-proxy/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.257316 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-t4srt_af042430-9b25-44c8-8f30-19db90025d05/manager/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.411319 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-rrnhb_020f5851-2dbc-464b-9217-6a3cb7a737a7/kube-rbac-proxy/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.418403 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-rrnhb_020f5851-2dbc-464b-9217-6a3cb7a737a7/manager/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.472545 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-4dfhr_12482564-55ba-46c6-857c-de815cddedc7/kube-rbac-proxy/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.566173 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-4dfhr_12482564-55ba-46c6-857c-de815cddedc7/manager/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.629403 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-9wbkb_dd149347-201c-4ce2-abdd-d41e57d1813a/kube-rbac-proxy/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.736502 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-9wbkb_dd149347-201c-4ce2-abdd-d41e57d1813a/manager/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.865702 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-44cz7_c0dd6d9d-3f07-4723-ae97-7adb0a4863b1/manager/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.866337 4792 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-44cz7_c0dd6d9d-3f07-4723-ae97-7adb0a4863b1/kube-rbac-proxy/0.log" Sep 29 20:19:27 crc kubenswrapper[4792]: I0929 20:19:27.927035 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-57cc59b9c6-9f256_5e2b0240-3697-4ee1-9052-5e72c8bf386a/kube-rbac-proxy/0.log" Sep 29 20:19:28 crc kubenswrapper[4792]: I0929 20:19:28.144361 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7f7c575847-m64w4_28b981e4-ec59-452c-950d-2b86f346df10/kube-rbac-proxy/0.log" Sep 29 20:19:28 crc kubenswrapper[4792]: I0929 20:19:28.347978 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-9z9w7_b589f424-730a-4e38-8dfd-c1229f055e2a/registry-server/0.log" Sep 29 20:19:28 crc kubenswrapper[4792]: I0929 20:19:28.374286 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7f7c575847-m64w4_28b981e4-ec59-452c-950d-2b86f346df10/operator/0.log" Sep 29 20:19:28 crc kubenswrapper[4792]: I0929 20:19:28.429211 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-zc87x_30ffe357-8b65-4481-95f2-7b2e13fd5676/kube-rbac-proxy/0.log" Sep 29 20:19:28 crc kubenswrapper[4792]: I0929 20:19:28.670586 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-zc87x_30ffe357-8b65-4481-95f2-7b2e13fd5676/manager/0.log" Sep 29 20:19:28 crc kubenswrapper[4792]: I0929 20:19:28.711084 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-f8qhj_18b79acc-6db2-4b4f-8f85-0b65dfd800b3/kube-rbac-proxy/0.log" Sep 29 20:19:28 crc kubenswrapper[4792]: I0929 20:19:28.835009 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-f8qhj_18b79acc-6db2-4b4f-8f85-0b65dfd800b3/manager/0.log" Sep 29 20:19:29 crc kubenswrapper[4792]: I0929 20:19:29.010191 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-vxkfn_5bc872e6-ce23-49cc-8ae7-bf92e4edda47/operator/0.log" Sep 29 20:19:29 crc kubenswrapper[4792]: I0929 20:19:29.130544 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-4p4rl_5be754f6-b295-4ca1-8f47-5a827e39580a/kube-rbac-proxy/0.log" Sep 29 20:19:29 crc kubenswrapper[4792]: I0929 20:19:29.135400 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-57cc59b9c6-9f256_5e2b0240-3697-4ee1-9052-5e72c8bf386a/manager/0.log" Sep 29 20:19:29 crc kubenswrapper[4792]: I0929 20:19:29.169161 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-4p4rl_5be754f6-b295-4ca1-8f47-5a827e39580a/manager/0.log" Sep 29 20:19:29 crc kubenswrapper[4792]: I0929 20:19:29.262341 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-x5h9k_aa049eb9-e9cf-47c9-a06b-91e8c787e6c1/kube-rbac-proxy/0.log" Sep 29 20:19:29 crc kubenswrapper[4792]: I0929 20:19:29.367217 
4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-x5h9k_aa049eb9-e9cf-47c9-a06b-91e8c787e6c1/manager/0.log" Sep 29 20:19:29 crc kubenswrapper[4792]: I0929 20:19:29.405619 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-frkgk_cc2b4990-0306-4b03-b344-b2e186883c4c/kube-rbac-proxy/0.log" Sep 29 20:19:29 crc kubenswrapper[4792]: I0929 20:19:29.472100 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-frkgk_cc2b4990-0306-4b03-b344-b2e186883c4c/manager/0.log" Sep 29 20:19:29 crc kubenswrapper[4792]: I0929 20:19:29.531900 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-m88jp_5ae40942-75a6-41a6-877a-4070bd348d32/kube-rbac-proxy/0.log" Sep 29 20:19:29 crc kubenswrapper[4792]: I0929 20:19:29.569530 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-m88jp_5ae40942-75a6-41a6-877a-4070bd348d32/manager/0.log" Sep 29 20:19:46 crc kubenswrapper[4792]: I0929 20:19:46.012113 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-857r4_7ac58a67-2de7-48ec-9a6c-f7cf37538bdd/control-plane-machine-set-operator/0.log" Sep 29 20:19:46 crc kubenswrapper[4792]: I0929 20:19:46.157424 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-lwr4w_39e18b3b-156d-46e5-9ace-51ee36c17614/kube-rbac-proxy/0.log" Sep 29 20:19:46 crc kubenswrapper[4792]: I0929 20:19:46.223861 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-lwr4w_39e18b3b-156d-46e5-9ace-51ee36c17614/machine-api-operator/0.log" Sep 29 20:19:59 crc kubenswrapper[4792]: I0929 20:19:59.969381 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-sb29r_d603ca4a-b40c-439f-b7ed-09a279e9d727/cert-manager-controller/0.log" Sep 29 20:20:00 crc kubenswrapper[4792]: I0929 20:20:00.140108 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-pxgvr_4bdb9002-6a61-4c32-a32e-3a76cc24a38e/cert-manager-cainjector/0.log" Sep 29 20:20:00 crc kubenswrapper[4792]: I0929 20:20:00.215313 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-mx55q_b48f55d3-c9a5-4973-b233-f59ced6a17e6/cert-manager-webhook/0.log" Sep 29 20:20:11 crc kubenswrapper[4792]: I0929 20:20:11.959991 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:20:11 crc kubenswrapper[4792]: I0929 20:20:11.960627 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:20:13 crc kubenswrapper[4792]: I0929 20:20:13.251379 4792 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-ps4tp_40d01e5b-1274-48a1-8510-4386dd7150bb/nmstate-console-plugin/0.log" Sep 29 20:20:13 crc kubenswrapper[4792]: I0929 20:20:13.280940 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-w7wwz_7d8ab4ec-b506-4549-be62-9b914b9cb3f3/nmstate-handler/0.log" Sep 29 20:20:13 crc kubenswrapper[4792]: I0929 20:20:13.437808 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-kjrzl_a1f9e458-fc58-4f84-89fc-9196c747d6ba/nmstate-metrics/0.log" Sep 29 20:20:13 crc kubenswrapper[4792]: I0929 20:20:13.506147 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-kjrzl_a1f9e458-fc58-4f84-89fc-9196c747d6ba/kube-rbac-proxy/0.log" Sep 29 20:20:13 crc kubenswrapper[4792]: I0929 20:20:13.615113 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-qr5w9_6a491845-f4ef-4f82-b716-d46be2982350/nmstate-operator/0.log" Sep 29 20:20:13 crc kubenswrapper[4792]: I0929 20:20:13.706701 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-9gcgx_66a5aaa8-8ce1-4d34-a58e-843ff50ca9ef/nmstate-webhook/0.log" Sep 29 20:20:28 crc kubenswrapper[4792]: I0929 20:20:28.746168 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-kjkvm_af42de4e-dfaa-4178-b742-d4388d56b58a/kube-rbac-proxy/0.log" Sep 29 20:20:28 crc kubenswrapper[4792]: I0929 20:20:28.948339 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-kjkvm_af42de4e-dfaa-4178-b742-d4388d56b58a/controller/0.log" Sep 29 20:20:28 crc kubenswrapper[4792]: I0929 20:20:28.954432 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-fw449_e4991702-0228-4b1e-abc9-01d614664746/frr-k8s-webhook-server/0.log" Sep 29 20:20:29 crc kubenswrapper[4792]: I0929 20:20:29.080230 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-frr-files/0.log" Sep 29 20:20:29 crc kubenswrapper[4792]: I0929 20:20:29.288817 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-reloader/0.log" Sep 29 20:20:29 crc kubenswrapper[4792]: I0929 20:20:29.322384 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-metrics/0.log" Sep 29 20:20:29 crc kubenswrapper[4792]: I0929 20:20:29.349985 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-reloader/0.log" Sep 29 20:20:29 crc kubenswrapper[4792]: I0929 20:20:29.350678 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-frr-files/0.log" Sep 29 20:20:29 crc kubenswrapper[4792]: I0929 20:20:29.526995 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-reloader/0.log" Sep 29 20:20:29 crc kubenswrapper[4792]: I0929 20:20:29.555038 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-metrics/0.log" Sep 29 20:20:29 crc 
kubenswrapper[4792]: I0929 20:20:29.644613 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-metrics/0.log" Sep 29 20:20:29 crc kubenswrapper[4792]: I0929 20:20:29.644651 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-frr-files/0.log" Sep 29 20:20:30 crc kubenswrapper[4792]: I0929 20:20:30.418165 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-reloader/0.log" Sep 29 20:20:30 crc kubenswrapper[4792]: I0929 20:20:30.436997 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-metrics/0.log" Sep 29 20:20:30 crc kubenswrapper[4792]: I0929 20:20:30.448286 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/cp-frr-files/0.log" Sep 29 20:20:30 crc kubenswrapper[4792]: I0929 20:20:30.483548 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/controller/0.log" Sep 29 20:20:30 crc kubenswrapper[4792]: I0929 20:20:30.596205 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/frr-metrics/0.log" Sep 29 20:20:30 crc kubenswrapper[4792]: I0929 20:20:30.928072 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/kube-rbac-proxy-frr/0.log" Sep 29 20:20:30 crc kubenswrapper[4792]: I0929 20:20:30.940598 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/kube-rbac-proxy/0.log" Sep 29 20:20:31 crc kubenswrapper[4792]: I0929 20:20:31.073801 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/reloader/0.log" Sep 29 20:20:31 crc kubenswrapper[4792]: I0929 20:20:31.253520 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-798fbb9bbf-rvlk8_f81e5f81-aa7e-4c65-900e-9a5929ca038b/manager/0.log" Sep 29 20:20:31 crc kubenswrapper[4792]: I0929 20:20:31.442378 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5888bf57-l785s_19a31c23-7b44-4b0e-a627-1891480c5e03/webhook-server/0.log" Sep 29 20:20:31 crc kubenswrapper[4792]: I0929 20:20:31.640775 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8csps_c8d5189a-0868-46dc-881c-077f4d5be810/kube-rbac-proxy/0.log" Sep 29 20:20:32 crc kubenswrapper[4792]: I0929 20:20:32.107998 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8csps_c8d5189a-0868-46dc-881c-077f4d5be810/speaker/0.log" Sep 29 20:20:32 crc kubenswrapper[4792]: I0929 20:20:32.415636 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wrpbj_698fb3db-6e7c-478d-bfae-70fbfa85e384/frr/0.log" Sep 29 20:20:41 crc kubenswrapper[4792]: I0929 20:20:41.959626 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Sep 29 20:20:41 crc kubenswrapper[4792]: I0929 20:20:41.961021 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.035359 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/util/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.198805 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/util/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.218801 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/pull/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.267276 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/pull/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.398659 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/extract/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.406933 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/pull/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.419259 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcv7fhk_eca64cc0-f739-41b8-812c-55536fc117b7/util/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.582940 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-utilities/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.710694 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-content/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.749390 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-content/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.749963 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-utilities/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.947643 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-utilities/0.log" Sep 29 20:20:44 crc kubenswrapper[4792]: I0929 20:20:44.961754 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/extract-content/0.log" Sep 29 20:20:45 crc kubenswrapper[4792]: I0929 20:20:45.236243 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-utilities/0.log" Sep 29 20:20:45 crc kubenswrapper[4792]: I0929 20:20:45.406986 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-utilities/0.log" Sep 29 20:20:45 crc kubenswrapper[4792]: I0929 20:20:45.478398 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-content/0.log" Sep 29 20:20:45 crc kubenswrapper[4792]: I0929 20:20:45.486804 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-content/0.log" Sep 29 20:20:45 crc kubenswrapper[4792]: I0929 20:20:45.544514 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7dj2p_a3990f16-15c9-49ab-9e7c-ded88e6f7043/registry-server/0.log" Sep 29 20:20:45 crc kubenswrapper[4792]: I0929 20:20:45.686921 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-utilities/0.log" Sep 29 20:20:45 crc kubenswrapper[4792]: I0929 20:20:45.704619 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/extract-content/0.log" Sep 29 20:20:45 crc kubenswrapper[4792]: I0929 20:20:45.904775 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/util/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.209661 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/pull/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.251369 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/util/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.284236 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dbs72_c175ab0e-c63b-4263-bca3-ec28d3165c93/registry-server/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.367592 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/pull/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.511274 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/util/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.531806 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/extract/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.547914 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96x69rv_d52e4791-ce38-457b-a2b2-83e5a4f491ab/pull/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.701983 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mjdnk_7ae10600-0f4b-4b98-b304-a13cb5283d63/marketplace-operator/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.732528 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-utilities/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.944145 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-content/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.967556 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-utilities/0.log" Sep 29 20:20:46 crc kubenswrapper[4792]: I0929 20:20:46.990321 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-content/0.log" Sep 29 20:20:47 crc kubenswrapper[4792]: I0929 20:20:47.224551 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-utilities/0.log" Sep 29 20:20:47 crc kubenswrapper[4792]: I0929 20:20:47.261216 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/extract-content/0.log" Sep 29 20:20:47 crc kubenswrapper[4792]: I0929 20:20:47.324342 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r5rqb_e799b8b0-ae24-4880-ad4c-11dc094789f4/registry-server/0.log" Sep 29 20:20:47 crc kubenswrapper[4792]: I0929 20:20:47.431049 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-utilities/0.log" Sep 29 20:20:47 crc kubenswrapper[4792]: I0929 20:20:47.647956 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-content/0.log" Sep 29 20:20:47 crc kubenswrapper[4792]: I0929 20:20:47.657255 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-content/0.log" Sep 29 20:20:47 crc kubenswrapper[4792]: I0929 20:20:47.658485 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-utilities/0.log" Sep 29 20:20:48 crc kubenswrapper[4792]: I0929 20:20:48.148688 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-utilities/0.log" Sep 29 20:20:48 crc kubenswrapper[4792]: I0929 20:20:48.241012 4792 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/extract-content/0.log" Sep 29 20:20:48 crc kubenswrapper[4792]: I0929 20:20:48.733786 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dh2pn_74049985-d99b-416d-80a0-2f73a2253f79/registry-server/0.log" Sep 29 20:21:11 crc kubenswrapper[4792]: I0929 20:21:11.959343 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:21:11 crc kubenswrapper[4792]: I0929 20:21:11.960682 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:21:11 crc kubenswrapper[4792]: I0929 20:21:11.960776 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 20:21:11 crc kubenswrapper[4792]: I0929 20:21:11.961600 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"879c3d6f11c2b7321b612ee9d2f622b0df24ff3872f25e237d9ed363b4b76971"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 20:21:11 crc kubenswrapper[4792]: I0929 20:21:11.961726 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://879c3d6f11c2b7321b612ee9d2f622b0df24ff3872f25e237d9ed363b4b76971" gracePeriod=600 Sep 29 20:21:12 crc kubenswrapper[4792]: I0929 20:21:12.114106 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="879c3d6f11c2b7321b612ee9d2f622b0df24ff3872f25e237d9ed363b4b76971" exitCode=0 Sep 29 20:21:12 crc kubenswrapper[4792]: I0929 20:21:12.114164 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"879c3d6f11c2b7321b612ee9d2f622b0df24ff3872f25e237d9ed363b4b76971"} Sep 29 20:21:12 crc kubenswrapper[4792]: I0929 20:21:12.114210 4792 scope.go:117] "RemoveContainer" containerID="0c32d1da37ddbc11c79c6be27e5d9a54bae2c6e1dd6cff428ba145dc4ed7552e" Sep 29 20:21:13 crc kubenswrapper[4792]: I0929 20:21:13.124820 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerStarted","Data":"235c8fe40377a9db11f48a0adeb2b92f75cf88c383f3cca6400f6614f8ced329"} Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.609030 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b67rw"] Sep 29 20:22:27 crc kubenswrapper[4792]: E0929 20:22:27.610066 4792 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="383b2cfa-8a7f-4a6c-a2a2-b6732603d780" containerName="container-00" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.610083 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="383b2cfa-8a7f-4a6c-a2a2-b6732603d780" containerName="container-00" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.610335 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="383b2cfa-8a7f-4a6c-a2a2-b6732603d780" containerName="container-00" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.612116 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.627906 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b67rw"] Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.697544 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-utilities\") pod \"community-operators-b67rw\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.697796 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm5mx\" (UniqueName: \"kubernetes.io/projected/789e7525-6b57-4046-a15d-662d334b662f-kube-api-access-pm5mx\") pod \"community-operators-b67rw\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.697977 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-catalog-content\") pod \"community-operators-b67rw\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.799462 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-catalog-content\") pod \"community-operators-b67rw\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.800157 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-catalog-content\") pod \"community-operators-b67rw\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.800516 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-utilities\") pod \"community-operators-b67rw\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.800968 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm5mx\" (UniqueName: 
\"kubernetes.io/projected/789e7525-6b57-4046-a15d-662d334b662f-kube-api-access-pm5mx\") pod \"community-operators-b67rw\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.800900 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-utilities\") pod \"community-operators-b67rw\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.818086 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm5mx\" (UniqueName: \"kubernetes.io/projected/789e7525-6b57-4046-a15d-662d334b662f-kube-api-access-pm5mx\") pod \"community-operators-b67rw\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:27 crc kubenswrapper[4792]: I0929 20:22:27.951016 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:28 crc kubenswrapper[4792]: I0929 20:22:28.488512 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b67rw"] Sep 29 20:22:28 crc kubenswrapper[4792]: I0929 20:22:28.988351 4792 generic.go:334] "Generic (PLEG): container finished" podID="789e7525-6b57-4046-a15d-662d334b662f" containerID="44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb" exitCode=0 Sep 29 20:22:28 crc kubenswrapper[4792]: I0929 20:22:28.988399 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b67rw" event={"ID":"789e7525-6b57-4046-a15d-662d334b662f","Type":"ContainerDied","Data":"44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb"} Sep 29 20:22:28 crc kubenswrapper[4792]: I0929 20:22:28.988435 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b67rw" event={"ID":"789e7525-6b57-4046-a15d-662d334b662f","Type":"ContainerStarted","Data":"1d38a9e613879def350499a17bdc2ba958dfe55dc848974ec82c679142cf38b0"} Sep 29 20:22:28 crc kubenswrapper[4792]: I0929 20:22:28.993028 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 20:22:30 crc kubenswrapper[4792]: I0929 20:22:30.001901 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b67rw" event={"ID":"789e7525-6b57-4046-a15d-662d334b662f","Type":"ContainerStarted","Data":"d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558"} Sep 29 20:22:31 crc kubenswrapper[4792]: I0929 20:22:31.026262 4792 generic.go:334] "Generic (PLEG): container finished" podID="789e7525-6b57-4046-a15d-662d334b662f" containerID="d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558" exitCode=0 Sep 29 20:22:31 crc kubenswrapper[4792]: I0929 20:22:31.051136 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b67rw" event={"ID":"789e7525-6b57-4046-a15d-662d334b662f","Type":"ContainerDied","Data":"d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558"} Sep 29 20:22:32 crc kubenswrapper[4792]: I0929 20:22:32.034732 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b67rw" 
event={"ID":"789e7525-6b57-4046-a15d-662d334b662f","Type":"ContainerStarted","Data":"090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a"} Sep 29 20:22:32 crc kubenswrapper[4792]: I0929 20:22:32.053269 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b67rw" podStartSLOduration=2.612784967 podStartE2EDuration="5.053250763s" podCreationTimestamp="2025-09-29 20:22:27 +0000 UTC" firstStartedPulling="2025-09-29 20:22:28.992772703 +0000 UTC m=+5160.986080099" lastFinishedPulling="2025-09-29 20:22:31.433238509 +0000 UTC m=+5163.426545895" observedRunningTime="2025-09-29 20:22:32.050012989 +0000 UTC m=+5164.043320385" watchObservedRunningTime="2025-09-29 20:22:32.053250763 +0000 UTC m=+5164.046558159" Sep 29 20:22:37 crc kubenswrapper[4792]: I0929 20:22:37.953028 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:37 crc kubenswrapper[4792]: I0929 20:22:37.953546 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:38 crc kubenswrapper[4792]: I0929 20:22:38.024677 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:38 crc kubenswrapper[4792]: I0929 20:22:38.130186 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:38 crc kubenswrapper[4792]: I0929 20:22:38.260092 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b67rw"] Sep 29 20:22:40 crc kubenswrapper[4792]: I0929 20:22:40.107265 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b67rw" podUID="789e7525-6b57-4046-a15d-662d334b662f" containerName="registry-server" containerID="cri-o://090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a" gracePeriod=2 Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.050377 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.129884 4792 generic.go:334] "Generic (PLEG): container finished" podID="789e7525-6b57-4046-a15d-662d334b662f" containerID="090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a" exitCode=0 Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.129921 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b67rw" event={"ID":"789e7525-6b57-4046-a15d-662d334b662f","Type":"ContainerDied","Data":"090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a"} Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.129964 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b67rw" event={"ID":"789e7525-6b57-4046-a15d-662d334b662f","Type":"ContainerDied","Data":"1d38a9e613879def350499a17bdc2ba958dfe55dc848974ec82c679142cf38b0"} Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.129983 4792 scope.go:117] "RemoveContainer" containerID="090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.130050 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b67rw" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.164021 4792 scope.go:117] "RemoveContainer" containerID="d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.169893 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-catalog-content\") pod \"789e7525-6b57-4046-a15d-662d334b662f\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.170017 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-utilities\") pod \"789e7525-6b57-4046-a15d-662d334b662f\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.170092 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm5mx\" (UniqueName: \"kubernetes.io/projected/789e7525-6b57-4046-a15d-662d334b662f-kube-api-access-pm5mx\") pod \"789e7525-6b57-4046-a15d-662d334b662f\" (UID: \"789e7525-6b57-4046-a15d-662d334b662f\") " Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.172591 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-utilities" (OuterVolumeSpecName: "utilities") pod "789e7525-6b57-4046-a15d-662d334b662f" (UID: "789e7525-6b57-4046-a15d-662d334b662f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.183194 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/789e7525-6b57-4046-a15d-662d334b662f-kube-api-access-pm5mx" (OuterVolumeSpecName: "kube-api-access-pm5mx") pod "789e7525-6b57-4046-a15d-662d334b662f" (UID: "789e7525-6b57-4046-a15d-662d334b662f"). InnerVolumeSpecName "kube-api-access-pm5mx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.227060 4792 scope.go:117] "RemoveContainer" containerID="44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.237279 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "789e7525-6b57-4046-a15d-662d334b662f" (UID: "789e7525-6b57-4046-a15d-662d334b662f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.263041 4792 scope.go:117] "RemoveContainer" containerID="090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a" Sep 29 20:22:41 crc kubenswrapper[4792]: E0929 20:22:41.263578 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a\": container with ID starting with 090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a not found: ID does not exist" containerID="090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.263703 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a"} err="failed to get container status \"090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a\": rpc error: code = NotFound desc = could not find container \"090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a\": container with ID starting with 090c3d63f62951a41b68a6a881ead82caa00d464bc320e4474813667bf3fc51a not found: ID does not exist" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.263797 4792 scope.go:117] "RemoveContainer" containerID="d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558" Sep 29 20:22:41 crc kubenswrapper[4792]: E0929 20:22:41.264205 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558\": container with ID starting with d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558 not found: ID does not exist" containerID="d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.264317 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558"} err="failed to get container status \"d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558\": rpc error: code = NotFound desc = could not find container \"d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558\": container with ID starting with d4e77c050c7262d1595802e95ab13789ad2de71d2c2bf448e5e9ba74c8f81558 not found: ID does not exist" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.264403 4792 scope.go:117] "RemoveContainer" containerID="44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb" Sep 29 20:22:41 crc kubenswrapper[4792]: E0929 20:22:41.264672 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb\": container with ID starting with 44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb not found: ID does not exist" containerID="44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.264761 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb"} err="failed to get container status \"44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb\": rpc error: code = NotFound desc = could not 
find container \"44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb\": container with ID starting with 44c01ec0f74c379ed89cad562e5d018f6b7c5b04b64add394a34c3d4e8ae1bbb not found: ID does not exist" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.273173 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.273307 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/789e7525-6b57-4046-a15d-662d334b662f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.273382 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm5mx\" (UniqueName: \"kubernetes.io/projected/789e7525-6b57-4046-a15d-662d334b662f-kube-api-access-pm5mx\") on node \"crc\" DevicePath \"\"" Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.461658 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b67rw"] Sep 29 20:22:41 crc kubenswrapper[4792]: I0929 20:22:41.471178 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b67rw"] Sep 29 20:22:43 crc kubenswrapper[4792]: I0929 20:22:43.031418 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="789e7525-6b57-4046-a15d-662d334b662f" path="/var/lib/kubelet/pods/789e7525-6b57-4046-a15d-662d334b662f/volumes" Sep 29 20:23:17 crc kubenswrapper[4792]: I0929 20:23:17.518994 4792 generic.go:334] "Generic (PLEG): container finished" podID="7227bcb0-4ce5-440a-80ad-e18135a805ac" containerID="ea9ad986f1669a8869fb09529a31480515f8a6422922df1c7e1f11680090b138" exitCode=0 Sep 29 20:23:17 crc kubenswrapper[4792]: I0929 20:23:17.519039 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9nm55/must-gather-swl2j" event={"ID":"7227bcb0-4ce5-440a-80ad-e18135a805ac","Type":"ContainerDied","Data":"ea9ad986f1669a8869fb09529a31480515f8a6422922df1c7e1f11680090b138"} Sep 29 20:23:17 crc kubenswrapper[4792]: I0929 20:23:17.521623 4792 scope.go:117] "RemoveContainer" containerID="ea9ad986f1669a8869fb09529a31480515f8a6422922df1c7e1f11680090b138" Sep 29 20:23:18 crc kubenswrapper[4792]: I0929 20:23:18.035918 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9nm55_must-gather-swl2j_7227bcb0-4ce5-440a-80ad-e18135a805ac/gather/0.log" Sep 29 20:23:31 crc kubenswrapper[4792]: I0929 20:23:31.539981 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9nm55/must-gather-swl2j"] Sep 29 20:23:31 crc kubenswrapper[4792]: I0929 20:23:31.540605 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-9nm55/must-gather-swl2j" podUID="7227bcb0-4ce5-440a-80ad-e18135a805ac" containerName="copy" containerID="cri-o://396ff91edd758e2f25ab9f7ae7319b29a728f8f5ff2e934244dbed8467d360ed" gracePeriod=2 Sep 29 20:23:31 crc kubenswrapper[4792]: I0929 20:23:31.547508 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9nm55/must-gather-swl2j"] Sep 29 20:23:31 crc kubenswrapper[4792]: I0929 20:23:31.681551 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9nm55_must-gather-swl2j_7227bcb0-4ce5-440a-80ad-e18135a805ac/copy/0.log" Sep 29 20:23:31 crc 
kubenswrapper[4792]: I0929 20:23:31.685505 4792 generic.go:334] "Generic (PLEG): container finished" podID="7227bcb0-4ce5-440a-80ad-e18135a805ac" containerID="396ff91edd758e2f25ab9f7ae7319b29a728f8f5ff2e934244dbed8467d360ed" exitCode=143 Sep 29 20:23:31 crc kubenswrapper[4792]: I0929 20:23:31.954821 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9nm55_must-gather-swl2j_7227bcb0-4ce5-440a-80ad-e18135a805ac/copy/0.log" Sep 29 20:23:31 crc kubenswrapper[4792]: I0929 20:23:31.955222 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9nm55/must-gather-swl2j" Sep 29 20:23:32 crc kubenswrapper[4792]: I0929 20:23:32.138539 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f46q4\" (UniqueName: \"kubernetes.io/projected/7227bcb0-4ce5-440a-80ad-e18135a805ac-kube-api-access-f46q4\") pod \"7227bcb0-4ce5-440a-80ad-e18135a805ac\" (UID: \"7227bcb0-4ce5-440a-80ad-e18135a805ac\") " Sep 29 20:23:32 crc kubenswrapper[4792]: I0929 20:23:32.138696 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7227bcb0-4ce5-440a-80ad-e18135a805ac-must-gather-output\") pod \"7227bcb0-4ce5-440a-80ad-e18135a805ac\" (UID: \"7227bcb0-4ce5-440a-80ad-e18135a805ac\") " Sep 29 20:23:32 crc kubenswrapper[4792]: I0929 20:23:32.148681 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7227bcb0-4ce5-440a-80ad-e18135a805ac-kube-api-access-f46q4" (OuterVolumeSpecName: "kube-api-access-f46q4") pod "7227bcb0-4ce5-440a-80ad-e18135a805ac" (UID: "7227bcb0-4ce5-440a-80ad-e18135a805ac"). InnerVolumeSpecName "kube-api-access-f46q4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:23:32 crc kubenswrapper[4792]: I0929 20:23:32.241071 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f46q4\" (UniqueName: \"kubernetes.io/projected/7227bcb0-4ce5-440a-80ad-e18135a805ac-kube-api-access-f46q4\") on node \"crc\" DevicePath \"\"" Sep 29 20:23:32 crc kubenswrapper[4792]: I0929 20:23:32.331259 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7227bcb0-4ce5-440a-80ad-e18135a805ac-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "7227bcb0-4ce5-440a-80ad-e18135a805ac" (UID: "7227bcb0-4ce5-440a-80ad-e18135a805ac"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:23:32 crc kubenswrapper[4792]: I0929 20:23:32.342758 4792 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7227bcb0-4ce5-440a-80ad-e18135a805ac-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 29 20:23:32 crc kubenswrapper[4792]: I0929 20:23:32.694182 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9nm55_must-gather-swl2j_7227bcb0-4ce5-440a-80ad-e18135a805ac/copy/0.log" Sep 29 20:23:32 crc kubenswrapper[4792]: I0929 20:23:32.694615 4792 scope.go:117] "RemoveContainer" containerID="396ff91edd758e2f25ab9f7ae7319b29a728f8f5ff2e934244dbed8467d360ed" Sep 29 20:23:32 crc kubenswrapper[4792]: I0929 20:23:32.694752 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9nm55/must-gather-swl2j" Sep 29 20:23:32 crc kubenswrapper[4792]: I0929 20:23:32.715903 4792 scope.go:117] "RemoveContainer" containerID="ea9ad986f1669a8869fb09529a31480515f8a6422922df1c7e1f11680090b138" Sep 29 20:23:33 crc kubenswrapper[4792]: I0929 20:23:33.027250 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7227bcb0-4ce5-440a-80ad-e18135a805ac" path="/var/lib/kubelet/pods/7227bcb0-4ce5-440a-80ad-e18135a805ac/volumes" Sep 29 20:23:40 crc kubenswrapper[4792]: I0929 20:23:40.011417 4792 scope.go:117] "RemoveContainer" containerID="57c5d7bc52cb4b6c598309bd8cd2c8764f58fca44416068346e91843b2cc1de7" Sep 29 20:23:41 crc kubenswrapper[4792]: I0929 20:23:41.959306 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:23:41 crc kubenswrapper[4792]: I0929 20:23:41.959611 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:24:11 crc kubenswrapper[4792]: I0929 20:24:11.959491 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:24:11 crc kubenswrapper[4792]: I0929 20:24:11.960141 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.772961 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jmchx"] Sep 29 20:24:34 crc kubenswrapper[4792]: E0929 20:24:34.775706 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="789e7525-6b57-4046-a15d-662d334b662f" containerName="extract-content" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.775923 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="789e7525-6b57-4046-a15d-662d334b662f" containerName="extract-content" Sep 29 20:24:34 crc kubenswrapper[4792]: E0929 20:24:34.776147 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="789e7525-6b57-4046-a15d-662d334b662f" containerName="extract-utilities" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.776341 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="789e7525-6b57-4046-a15d-662d334b662f" containerName="extract-utilities" Sep 29 20:24:34 crc kubenswrapper[4792]: E0929 20:24:34.776508 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7227bcb0-4ce5-440a-80ad-e18135a805ac" containerName="copy" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.776659 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7227bcb0-4ce5-440a-80ad-e18135a805ac" containerName="copy" Sep 29 20:24:34 crc 
kubenswrapper[4792]: E0929 20:24:34.776848 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7227bcb0-4ce5-440a-80ad-e18135a805ac" containerName="gather" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.777065 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7227bcb0-4ce5-440a-80ad-e18135a805ac" containerName="gather" Sep 29 20:24:34 crc kubenswrapper[4792]: E0929 20:24:34.777230 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="789e7525-6b57-4046-a15d-662d334b662f" containerName="registry-server" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.778301 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="789e7525-6b57-4046-a15d-662d334b662f" containerName="registry-server" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.778957 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="7227bcb0-4ce5-440a-80ad-e18135a805ac" containerName="copy" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.779286 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="7227bcb0-4ce5-440a-80ad-e18135a805ac" containerName="gather" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.779594 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="789e7525-6b57-4046-a15d-662d334b662f" containerName="registry-server" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.782187 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.787293 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbrzh\" (UniqueName: \"kubernetes.io/projected/32337771-cf01-4dd9-988a-e42a03c4b287-kube-api-access-qbrzh\") pod \"redhat-marketplace-jmchx\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.787405 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-utilities\") pod \"redhat-marketplace-jmchx\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.787528 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-catalog-content\") pod \"redhat-marketplace-jmchx\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.791664 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jmchx"] Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.889093 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-utilities\") pod \"redhat-marketplace-jmchx\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.889331 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-catalog-content\") pod \"redhat-marketplace-jmchx\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.889599 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbrzh\" (UniqueName: \"kubernetes.io/projected/32337771-cf01-4dd9-988a-e42a03c4b287-kube-api-access-qbrzh\") pod \"redhat-marketplace-jmchx\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.889804 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-utilities\") pod \"redhat-marketplace-jmchx\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.890043 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-catalog-content\") pod \"redhat-marketplace-jmchx\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:34 crc kubenswrapper[4792]: I0929 20:24:34.909259 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbrzh\" (UniqueName: \"kubernetes.io/projected/32337771-cf01-4dd9-988a-e42a03c4b287-kube-api-access-qbrzh\") pod \"redhat-marketplace-jmchx\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:35 crc kubenswrapper[4792]: I0929 20:24:35.144433 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:35 crc kubenswrapper[4792]: I0929 20:24:35.587735 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jmchx"] Sep 29 20:24:36 crc kubenswrapper[4792]: I0929 20:24:36.474042 4792 generic.go:334] "Generic (PLEG): container finished" podID="32337771-cf01-4dd9-988a-e42a03c4b287" containerID="7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2" exitCode=0 Sep 29 20:24:36 crc kubenswrapper[4792]: I0929 20:24:36.474104 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jmchx" event={"ID":"32337771-cf01-4dd9-988a-e42a03c4b287","Type":"ContainerDied","Data":"7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2"} Sep 29 20:24:36 crc kubenswrapper[4792]: I0929 20:24:36.474436 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jmchx" event={"ID":"32337771-cf01-4dd9-988a-e42a03c4b287","Type":"ContainerStarted","Data":"12e46709f1191306b9ae44cf6022415407072f8676bca3fe6c4f22924f5d75dc"} Sep 29 20:24:37 crc kubenswrapper[4792]: I0929 20:24:37.485426 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jmchx" event={"ID":"32337771-cf01-4dd9-988a-e42a03c4b287","Type":"ContainerStarted","Data":"316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82"} Sep 29 20:24:38 crc kubenswrapper[4792]: I0929 20:24:38.504550 4792 generic.go:334] "Generic (PLEG): container finished" podID="32337771-cf01-4dd9-988a-e42a03c4b287" containerID="316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82" exitCode=0 Sep 29 20:24:38 crc kubenswrapper[4792]: I0929 20:24:38.505130 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jmchx" event={"ID":"32337771-cf01-4dd9-988a-e42a03c4b287","Type":"ContainerDied","Data":"316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82"} Sep 29 20:24:39 crc kubenswrapper[4792]: I0929 20:24:39.515068 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jmchx" event={"ID":"32337771-cf01-4dd9-988a-e42a03c4b287","Type":"ContainerStarted","Data":"31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187"} Sep 29 20:24:39 crc kubenswrapper[4792]: I0929 20:24:39.533612 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jmchx" podStartSLOduration=3.034297049 podStartE2EDuration="5.533592223s" podCreationTimestamp="2025-09-29 20:24:34 +0000 UTC" firstStartedPulling="2025-09-29 20:24:36.477529248 +0000 UTC m=+5288.470836654" lastFinishedPulling="2025-09-29 20:24:38.976824432 +0000 UTC m=+5290.970131828" observedRunningTime="2025-09-29 20:24:39.533241524 +0000 UTC m=+5291.526548930" watchObservedRunningTime="2025-09-29 20:24:39.533592223 +0000 UTC m=+5291.526899629" Sep 29 20:24:41 crc kubenswrapper[4792]: I0929 20:24:41.959350 4792 patch_prober.go:28] interesting pod/machine-config-daemon-p5q59 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:24:41 crc kubenswrapper[4792]: I0929 20:24:41.959727 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" 
podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:24:41 crc kubenswrapper[4792]: I0929 20:24:41.959776 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" Sep 29 20:24:41 crc kubenswrapper[4792]: I0929 20:24:41.960840 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"235c8fe40377a9db11f48a0adeb2b92f75cf88c383f3cca6400f6614f8ced329"} pod="openshift-machine-config-operator/machine-config-daemon-p5q59" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 20:24:41 crc kubenswrapper[4792]: I0929 20:24:41.960963 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerName="machine-config-daemon" containerID="cri-o://235c8fe40377a9db11f48a0adeb2b92f75cf88c383f3cca6400f6614f8ced329" gracePeriod=600 Sep 29 20:24:42 crc kubenswrapper[4792]: E0929 20:24:42.091502 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:24:42 crc kubenswrapper[4792]: I0929 20:24:42.548415 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ae66548-086e-4ca9-bd6f-281ce46e7557" containerID="235c8fe40377a9db11f48a0adeb2b92f75cf88c383f3cca6400f6614f8ced329" exitCode=0 Sep 29 20:24:42 crc kubenswrapper[4792]: I0929 20:24:42.548469 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" event={"ID":"0ae66548-086e-4ca9-bd6f-281ce46e7557","Type":"ContainerDied","Data":"235c8fe40377a9db11f48a0adeb2b92f75cf88c383f3cca6400f6614f8ced329"} Sep 29 20:24:42 crc kubenswrapper[4792]: I0929 20:24:42.548517 4792 scope.go:117] "RemoveContainer" containerID="879c3d6f11c2b7321b612ee9d2f622b0df24ff3872f25e237d9ed363b4b76971" Sep 29 20:24:42 crc kubenswrapper[4792]: I0929 20:24:42.549510 4792 scope.go:117] "RemoveContainer" containerID="235c8fe40377a9db11f48a0adeb2b92f75cf88c383f3cca6400f6614f8ced329" Sep 29 20:24:42 crc kubenswrapper[4792]: E0929 20:24:42.549920 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:24:45 crc kubenswrapper[4792]: I0929 20:24:45.144960 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:45 crc kubenswrapper[4792]: I0929 20:24:45.145263 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jmchx" 
Sep 29 20:24:45 crc kubenswrapper[4792]: I0929 20:24:45.192827 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:45 crc kubenswrapper[4792]: I0929 20:24:45.634104 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:45 crc kubenswrapper[4792]: I0929 20:24:45.684882 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jmchx"] Sep 29 20:24:47 crc kubenswrapper[4792]: I0929 20:24:47.602020 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jmchx" podUID="32337771-cf01-4dd9-988a-e42a03c4b287" containerName="registry-server" containerID="cri-o://31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187" gracePeriod=2 Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.050552 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.140635 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbrzh\" (UniqueName: \"kubernetes.io/projected/32337771-cf01-4dd9-988a-e42a03c4b287-kube-api-access-qbrzh\") pod \"32337771-cf01-4dd9-988a-e42a03c4b287\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.140706 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-catalog-content\") pod \"32337771-cf01-4dd9-988a-e42a03c4b287\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.140798 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-utilities\") pod \"32337771-cf01-4dd9-988a-e42a03c4b287\" (UID: \"32337771-cf01-4dd9-988a-e42a03c4b287\") " Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.142091 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-utilities" (OuterVolumeSpecName: "utilities") pod "32337771-cf01-4dd9-988a-e42a03c4b287" (UID: "32337771-cf01-4dd9-988a-e42a03c4b287"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.146541 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32337771-cf01-4dd9-988a-e42a03c4b287-kube-api-access-qbrzh" (OuterVolumeSpecName: "kube-api-access-qbrzh") pod "32337771-cf01-4dd9-988a-e42a03c4b287" (UID: "32337771-cf01-4dd9-988a-e42a03c4b287"). InnerVolumeSpecName "kube-api-access-qbrzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.155672 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "32337771-cf01-4dd9-988a-e42a03c4b287" (UID: "32337771-cf01-4dd9-988a-e42a03c4b287"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.242694 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbrzh\" (UniqueName: \"kubernetes.io/projected/32337771-cf01-4dd9-988a-e42a03c4b287-kube-api-access-qbrzh\") on node \"crc\" DevicePath \"\"" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.242732 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.242745 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32337771-cf01-4dd9-988a-e42a03c4b287-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.615585 4792 generic.go:334] "Generic (PLEG): container finished" podID="32337771-cf01-4dd9-988a-e42a03c4b287" containerID="31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187" exitCode=0 Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.615685 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jmchx" event={"ID":"32337771-cf01-4dd9-988a-e42a03c4b287","Type":"ContainerDied","Data":"31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187"} Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.616958 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jmchx" event={"ID":"32337771-cf01-4dd9-988a-e42a03c4b287","Type":"ContainerDied","Data":"12e46709f1191306b9ae44cf6022415407072f8676bca3fe6c4f22924f5d75dc"} Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.616987 4792 scope.go:117] "RemoveContainer" containerID="31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.615704 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jmchx" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.644617 4792 scope.go:117] "RemoveContainer" containerID="316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.695068 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jmchx"] Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.696011 4792 scope.go:117] "RemoveContainer" containerID="7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.703494 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jmchx"] Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.744343 4792 scope.go:117] "RemoveContainer" containerID="31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187" Sep 29 20:24:48 crc kubenswrapper[4792]: E0929 20:24:48.744841 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187\": container with ID starting with 31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187 not found: ID does not exist" containerID="31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.745015 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187"} err="failed to get container status \"31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187\": rpc error: code = NotFound desc = could not find container \"31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187\": container with ID starting with 31f42f3d8c4fb16e2158cedf9e0dbc7b56d630d76350a3a87432893ae7737187 not found: ID does not exist" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.745057 4792 scope.go:117] "RemoveContainer" containerID="316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82" Sep 29 20:24:48 crc kubenswrapper[4792]: E0929 20:24:48.745427 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82\": container with ID starting with 316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82 not found: ID does not exist" containerID="316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.745460 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82"} err="failed to get container status \"316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82\": rpc error: code = NotFound desc = could not find container \"316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82\": container with ID starting with 316b2e8d82d52a01b4daf84ea2d968154656b4c0c29628563706de35132a6f82 not found: ID does not exist" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.745486 4792 scope.go:117] "RemoveContainer" containerID="7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2" Sep 29 20:24:48 crc kubenswrapper[4792]: E0929 20:24:48.746069 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2\": container with ID starting with 7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2 not found: ID does not exist" containerID="7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2" Sep 29 20:24:48 crc kubenswrapper[4792]: I0929 20:24:48.746192 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2"} err="failed to get container status \"7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2\": rpc error: code = NotFound desc = could not find container \"7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2\": container with ID starting with 7612d004ac4b1a96bf0404f925edb4ef0795e8cb855f2992eb545b9d491c40c2 not found: ID does not exist" Sep 29 20:24:49 crc kubenswrapper[4792]: I0929 20:24:49.028047 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32337771-cf01-4dd9-988a-e42a03c4b287" path="/var/lib/kubelet/pods/32337771-cf01-4dd9-988a-e42a03c4b287/volumes" Sep 29 20:24:53 crc kubenswrapper[4792]: I0929 20:24:53.016311 4792 scope.go:117] "RemoveContainer" containerID="235c8fe40377a9db11f48a0adeb2b92f75cf88c383f3cca6400f6614f8ced329" Sep 29 20:24:53 crc kubenswrapper[4792]: E0929 20:24:53.017333 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:25:01 crc kubenswrapper[4792]: I0929 20:25:01.877915 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fhxmq"] Sep 29 20:25:01 crc kubenswrapper[4792]: E0929 20:25:01.879679 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32337771-cf01-4dd9-988a-e42a03c4b287" containerName="extract-utilities" Sep 29 20:25:01 crc kubenswrapper[4792]: I0929 20:25:01.879700 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="32337771-cf01-4dd9-988a-e42a03c4b287" containerName="extract-utilities" Sep 29 20:25:01 crc kubenswrapper[4792]: E0929 20:25:01.879749 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32337771-cf01-4dd9-988a-e42a03c4b287" containerName="registry-server" Sep 29 20:25:01 crc kubenswrapper[4792]: I0929 20:25:01.879758 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="32337771-cf01-4dd9-988a-e42a03c4b287" containerName="registry-server" Sep 29 20:25:01 crc kubenswrapper[4792]: E0929 20:25:01.879775 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32337771-cf01-4dd9-988a-e42a03c4b287" containerName="extract-content" Sep 29 20:25:01 crc kubenswrapper[4792]: I0929 20:25:01.879784 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="32337771-cf01-4dd9-988a-e42a03c4b287" containerName="extract-content" Sep 29 20:25:01 crc kubenswrapper[4792]: I0929 20:25:01.880081 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="32337771-cf01-4dd9-988a-e42a03c4b287" containerName="registry-server" Sep 29 20:25:01 crc kubenswrapper[4792]: I0929 20:25:01.881800 4792 util.go:30] "No sandbox for pod can be found. 
Sep 29 20:25:01 crc kubenswrapper[4792]: I0929 20:25:01.886091 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fhxmq"]
Sep 29 20:25:01 crc kubenswrapper[4792]: I0929 20:25:01.919645 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-catalog-content\") pod \"certified-operators-fhxmq\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " pod="openshift-marketplace/certified-operators-fhxmq"
Sep 29 20:25:01 crc kubenswrapper[4792]: I0929 20:25:01.919718 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-utilities\") pod \"certified-operators-fhxmq\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " pod="openshift-marketplace/certified-operators-fhxmq"
Sep 29 20:25:01 crc kubenswrapper[4792]: I0929 20:25:01.920016 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj8ff\" (UniqueName: \"kubernetes.io/projected/b5ab0272-4375-472d-bbad-f154897d6364-kube-api-access-cj8ff\") pod \"certified-operators-fhxmq\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " pod="openshift-marketplace/certified-operators-fhxmq"
Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.021527 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-catalog-content\") pod \"certified-operators-fhxmq\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " pod="openshift-marketplace/certified-operators-fhxmq"
Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.021610 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-utilities\") pod \"certified-operators-fhxmq\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " pod="openshift-marketplace/certified-operators-fhxmq"
Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.021719 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj8ff\" (UniqueName: \"kubernetes.io/projected/b5ab0272-4375-472d-bbad-f154897d6364-kube-api-access-cj8ff\") pod \"certified-operators-fhxmq\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " pod="openshift-marketplace/certified-operators-fhxmq"
Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.022167 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-catalog-content\") pod \"certified-operators-fhxmq\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " pod="openshift-marketplace/certified-operators-fhxmq"
Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.022529 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-utilities\") pod \"certified-operators-fhxmq\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " pod="openshift-marketplace/certified-operators-fhxmq"
Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.042560 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj8ff\" (UniqueName: \"kubernetes.io/projected/b5ab0272-4375-472d-bbad-f154897d6364-kube-api-access-cj8ff\") pod \"certified-operators-fhxmq\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " pod="openshift-marketplace/certified-operators-fhxmq"
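[Editor's note] The reconciler lines above show the volume manager's ordering for the new pod: each volume is first verified as attached to the node (trivial for node-local empty-dir and projected volumes) and only then mounted via MountVolume.SetUp. A simplified Go sketch of that desired-state-to-actual-state pass (illustrative structure only; the real logic lives in reconciler_common.go and operation_generator.go):

package main

import "fmt"

func main() {
	// Desired state for the new pod, as in the reconciler lines above.
	desired := []string{"catalog-content", "utilities", "kube-api-access-cj8ff"}
	mounted := map[string]bool{} // actual state of the world

	for _, vol := range desired {
		// Step 1: confirm the volume is attached to the node. empty-dir and
		// projected volumes are node-local, so this succeeds immediately.
		fmt.Printf("VerifyControllerAttachedVolume started for volume %q\n", vol)
		// Step 2: mount it into the pod's volume directory.
		if !mounted[vol] {
			mounted[vol] = true
			fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", vol)
		}
	}
}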
"MountVolume.SetUp succeeded for volume \"kube-api-access-cj8ff\" (UniqueName: \"kubernetes.io/projected/b5ab0272-4375-472d-bbad-f154897d6364-kube-api-access-cj8ff\") pod \"certified-operators-fhxmq\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " pod="openshift-marketplace/certified-operators-fhxmq" Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.214441 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fhxmq" Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.543617 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fhxmq"] Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.750833 4792 generic.go:334] "Generic (PLEG): container finished" podID="b5ab0272-4375-472d-bbad-f154897d6364" containerID="a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c" exitCode=0 Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.750935 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhxmq" event={"ID":"b5ab0272-4375-472d-bbad-f154897d6364","Type":"ContainerDied","Data":"a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c"} Sep 29 20:25:02 crc kubenswrapper[4792]: I0929 20:25:02.751131 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhxmq" event={"ID":"b5ab0272-4375-472d-bbad-f154897d6364","Type":"ContainerStarted","Data":"faf60555037fd0a0b63810ff0191f86bdc0190859d4ce007084f47e8753d5bd1"} Sep 29 20:25:03 crc kubenswrapper[4792]: I0929 20:25:03.766637 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhxmq" event={"ID":"b5ab0272-4375-472d-bbad-f154897d6364","Type":"ContainerStarted","Data":"3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f"} Sep 29 20:25:05 crc kubenswrapper[4792]: I0929 20:25:05.797604 4792 generic.go:334] "Generic (PLEG): container finished" podID="b5ab0272-4375-472d-bbad-f154897d6364" containerID="3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f" exitCode=0 Sep 29 20:25:05 crc kubenswrapper[4792]: I0929 20:25:05.797693 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhxmq" event={"ID":"b5ab0272-4375-472d-bbad-f154897d6364","Type":"ContainerDied","Data":"3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f"} Sep 29 20:25:06 crc kubenswrapper[4792]: I0929 20:25:06.810771 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhxmq" event={"ID":"b5ab0272-4375-472d-bbad-f154897d6364","Type":"ContainerStarted","Data":"c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0"} Sep 29 20:25:06 crc kubenswrapper[4792]: I0929 20:25:06.839478 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fhxmq" podStartSLOduration=2.142302531 podStartE2EDuration="5.839457087s" podCreationTimestamp="2025-09-29 20:25:01 +0000 UTC" firstStartedPulling="2025-09-29 20:25:02.752722273 +0000 UTC m=+5314.746029669" lastFinishedPulling="2025-09-29 20:25:06.449876819 +0000 UTC m=+5318.443184225" observedRunningTime="2025-09-29 20:25:06.832494126 +0000 UTC m=+5318.825801532" watchObservedRunningTime="2025-09-29 20:25:06.839457087 +0000 UTC m=+5318.832764493" Sep 29 20:25:07 crc kubenswrapper[4792]: I0929 20:25:07.017527 4792 scope.go:117] "RemoveContainer" 
containerID="235c8fe40377a9db11f48a0adeb2b92f75cf88c383f3cca6400f6614f8ced329" Sep 29 20:25:07 crc kubenswrapper[4792]: E0929 20:25:07.018340 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:25:12 crc kubenswrapper[4792]: I0929 20:25:12.214996 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fhxmq" Sep 29 20:25:12 crc kubenswrapper[4792]: I0929 20:25:12.215369 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fhxmq" Sep 29 20:25:12 crc kubenswrapper[4792]: I0929 20:25:12.257316 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fhxmq" Sep 29 20:25:12 crc kubenswrapper[4792]: I0929 20:25:12.932758 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fhxmq" Sep 29 20:25:12 crc kubenswrapper[4792]: I0929 20:25:12.992749 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fhxmq"] Sep 29 20:25:14 crc kubenswrapper[4792]: I0929 20:25:14.896557 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fhxmq" podUID="b5ab0272-4375-472d-bbad-f154897d6364" containerName="registry-server" containerID="cri-o://c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0" gracePeriod=2 Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.343052 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fhxmq" Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.388892 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-utilities\") pod \"b5ab0272-4375-472d-bbad-f154897d6364\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.389065 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-catalog-content\") pod \"b5ab0272-4375-472d-bbad-f154897d6364\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.389102 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cj8ff\" (UniqueName: \"kubernetes.io/projected/b5ab0272-4375-472d-bbad-f154897d6364-kube-api-access-cj8ff\") pod \"b5ab0272-4375-472d-bbad-f154897d6364\" (UID: \"b5ab0272-4375-472d-bbad-f154897d6364\") " Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.389840 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-utilities" (OuterVolumeSpecName: "utilities") pod "b5ab0272-4375-472d-bbad-f154897d6364" (UID: "b5ab0272-4375-472d-bbad-f154897d6364"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.395215 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5ab0272-4375-472d-bbad-f154897d6364-kube-api-access-cj8ff" (OuterVolumeSpecName: "kube-api-access-cj8ff") pod "b5ab0272-4375-472d-bbad-f154897d6364" (UID: "b5ab0272-4375-472d-bbad-f154897d6364"). InnerVolumeSpecName "kube-api-access-cj8ff". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.440211 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5ab0272-4375-472d-bbad-f154897d6364" (UID: "b5ab0272-4375-472d-bbad-f154897d6364"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.491246 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.491485 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cj8ff\" (UniqueName: \"kubernetes.io/projected/b5ab0272-4375-472d-bbad-f154897d6364-kube-api-access-cj8ff\") on node \"crc\" DevicePath \"\"" Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.491568 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ab0272-4375-472d-bbad-f154897d6364-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.912524 4792 generic.go:334] "Generic (PLEG): container finished" podID="b5ab0272-4375-472d-bbad-f154897d6364" containerID="c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0" exitCode=0 Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.912579 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhxmq" event={"ID":"b5ab0272-4375-472d-bbad-f154897d6364","Type":"ContainerDied","Data":"c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0"} Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.912649 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhxmq" event={"ID":"b5ab0272-4375-472d-bbad-f154897d6364","Type":"ContainerDied","Data":"faf60555037fd0a0b63810ff0191f86bdc0190859d4ce007084f47e8753d5bd1"} Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.912648 4792 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.912670 4792 scope.go:117] "RemoveContainer" containerID="c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0"
Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.945230 4792 scope.go:117] "RemoveContainer" containerID="3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f"
Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.960170 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fhxmq"]
Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.969754 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fhxmq"]
Sep 29 20:25:15 crc kubenswrapper[4792]: I0929 20:25:15.995078 4792 scope.go:117] "RemoveContainer" containerID="a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c"
Sep 29 20:25:16 crc kubenswrapper[4792]: I0929 20:25:16.029626 4792 scope.go:117] "RemoveContainer" containerID="c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0"
Sep 29 20:25:16 crc kubenswrapper[4792]: E0929 20:25:16.030123 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0\": container with ID starting with c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0 not found: ID does not exist" containerID="c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0"
Sep 29 20:25:16 crc kubenswrapper[4792]: I0929 20:25:16.030172 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0"} err="failed to get container status \"c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0\": rpc error: code = NotFound desc = could not find container \"c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0\": container with ID starting with c08f55219b12b09786d33e584281acf3a072bff16806f30ed40cdf498dda04e0 not found: ID does not exist"
Sep 29 20:25:16 crc kubenswrapper[4792]: I0929 20:25:16.030194 4792 scope.go:117] "RemoveContainer" containerID="3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f"
Sep 29 20:25:16 crc kubenswrapper[4792]: E0929 20:25:16.030563 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f\": container with ID starting with 3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f not found: ID does not exist" containerID="3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f"
Sep 29 20:25:16 crc kubenswrapper[4792]: I0929 20:25:16.030637 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f"} err="failed to get container status \"3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f\": rpc error: code = NotFound desc = could not find container \"3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f\": container with ID starting with 3b47a2a7dee850870359e138ecb6b1c80071a0e4f4c680b8bb33bc890bd5ed2f not found: ID does not exist"
Sep 29 20:25:16 crc kubenswrapper[4792]: I0929 20:25:16.030688 4792 scope.go:117] "RemoveContainer" containerID="a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c"
containerID="a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c" Sep 29 20:25:16 crc kubenswrapper[4792]: E0929 20:25:16.031208 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c\": container with ID starting with a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c not found: ID does not exist" containerID="a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c" Sep 29 20:25:16 crc kubenswrapper[4792]: I0929 20:25:16.031240 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c"} err="failed to get container status \"a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c\": rpc error: code = NotFound desc = could not find container \"a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c\": container with ID starting with a0ac9340f6c3d52f0c590894f9a27936fb8f01b6733cab870e6d59f1f72b294c not found: ID does not exist" Sep 29 20:25:17 crc kubenswrapper[4792]: I0929 20:25:17.030510 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5ab0272-4375-472d-bbad-f154897d6364" path="/var/lib/kubelet/pods/b5ab0272-4375-472d-bbad-f154897d6364/volumes" Sep 29 20:25:21 crc kubenswrapper[4792]: I0929 20:25:21.015231 4792 scope.go:117] "RemoveContainer" containerID="235c8fe40377a9db11f48a0adeb2b92f75cf88c383f3cca6400f6614f8ced329" Sep 29 20:25:21 crc kubenswrapper[4792]: E0929 20:25:21.015767 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" Sep 29 20:25:33 crc kubenswrapper[4792]: I0929 20:25:33.015425 4792 scope.go:117] "RemoveContainer" containerID="235c8fe40377a9db11f48a0adeb2b92f75cf88c383f3cca6400f6614f8ced329" Sep 29 20:25:33 crc kubenswrapper[4792]: E0929 20:25:33.018013 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p5q59_openshift-machine-config-operator(0ae66548-086e-4ca9-bd6f-281ce46e7557)\"" pod="openshift-machine-config-operator/machine-config-daemon-p5q59" podUID="0ae66548-086e-4ca9-bd6f-281ce46e7557" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066565507024464 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066565507017401 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066552524016517 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066552524015467 5ustar corecore